1 2 #include <petsc/private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petsc/private/hashmapi.h> 4 #include <petscsf.h> 5 #include <petscviewer.h> 6 7 PetscClassId IS_LTOGM_CLASSID; 8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***); 9 10 typedef struct { 11 PetscInt *globals; 12 } ISLocalToGlobalMapping_Basic; 13 14 typedef struct { 15 PetscHMapI globalht; 16 } ISLocalToGlobalMapping_Hash; 17 18 /*@C 19 ISGetPointRange - Returns a description of the points in an IS suitable for traversal 20 21 Not collective 22 23 Input Parameter: 24 . pointIS - The IS object 25 26 Output Parameters: 27 + pStart - The first index, see notes 28 . pEnd - One past the last index, see notes 29 - points - The indices, see notes 30 31 Notes: 32 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 33 $ ISGetPointRange(is, &pStart, &pEnd, &points); 34 $ for (p = pStart; p < pEnd; ++p) { 35 $ const PetscInt point = points ? 
points[p] : p; 36 $ } 37 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 38 39 Level: intermediate 40 41 .seealso: `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 42 @*/ 43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) { 44 PetscInt numCells, step = 1; 45 PetscBool isStride; 46 47 PetscFunctionBeginHot; 48 *pStart = 0; 49 *points = NULL; 50 PetscCall(ISGetLocalSize(pointIS, &numCells)); 51 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 52 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 53 *pEnd = *pStart + numCells; 54 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 55 PetscFunctionReturn(0); 56 } 57 58 /*@C 59 ISRestorePointRange - Destroys the traversal description 60 61 Not collective 62 63 Input Parameters: 64 + pointIS - The IS object 65 . pStart - The first index, from ISGetPointRange() 66 . pEnd - One past the last index, from ISGetPointRange() 67 - points - The indices, from ISGetPointRange() 68 69 Notes: 70 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 71 $ ISGetPointRange(is, &pStart, &pEnd, &points); 72 $ for (p = pStart; p < pEnd; ++p) { 73 $ const PetscInt point = points ? 
points[p] : p; 74 $ } 75 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 76 77 Level: intermediate 78 79 .seealso: `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 80 @*/ 81 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) { 82 PetscInt step = 1; 83 PetscBool isStride; 84 85 PetscFunctionBeginHot; 86 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 87 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 88 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 89 PetscFunctionReturn(0); 90 } 91 92 /*@C 93 ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given 94 95 Not collective 96 97 Input Parameters: 98 + subpointIS - The IS object to be configured 99 . pStar t - The first index of the subrange 100 . pEnd - One past the last index for the subrange 101 - points - The indices for the entire range, from ISGetPointRange() 102 103 Output Parameters: 104 . subpointIS - The IS object now configured to be a subrange 105 106 Notes: 107 The input IS will now respond properly to calls to ISGetPointRange() and return the subrange. 
108 109 Level: intermediate 110 111 .seealso: `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()` 112 @*/ 113 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points) { 114 PetscFunctionBeginHot; 115 if (points) { 116 PetscCall(ISSetType(subpointIS, ISGENERAL)); 117 PetscCall(ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER)); 118 } else { 119 PetscCall(ISSetType(subpointIS, ISSTRIDE)); 120 PetscCall(ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1)); 121 } 122 PetscFunctionReturn(0); 123 } 124 125 /* -----------------------------------------------------------------------------------------*/ 126 127 /* 128 Creates the global mapping information in the ISLocalToGlobalMapping structure 129 130 If the user has not selected how to handle the global to local mapping then use HASH for "large" problems 131 */ 132 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping) { 133 PetscInt i, *idx = mapping->indices, n = mapping->n, end, start; 134 135 PetscFunctionBegin; 136 if (mapping->data) PetscFunctionReturn(0); 137 end = 0; 138 start = PETSC_MAX_INT; 139 140 for (i = 0; i < n; i++) { 141 if (idx[i] < 0) continue; 142 if (idx[i] < start) start = idx[i]; 143 if (idx[i] > end) end = idx[i]; 144 } 145 if (start > end) { 146 start = 0; 147 end = -1; 148 } 149 mapping->globalstart = start; 150 mapping->globalend = end; 151 if (!((PetscObject)mapping)->type_name) { 152 if ((end - start) > PetscMax(4 * n, 1000000)) { 153 PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH)); 154 } else { 155 PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC)); 156 } 157 } 158 PetscTryTypeMethod(mapping, globaltolocalmappingsetup); 159 PetscFunctionReturn(0); 160 } 161 162 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping) { 163 PetscInt i, *idx = mapping->indices, n = 
mapping->n, end, start, *globals; 164 ISLocalToGlobalMapping_Basic *map; 165 166 PetscFunctionBegin; 167 start = mapping->globalstart; 168 end = mapping->globalend; 169 PetscCall(PetscNew(&map)); 170 PetscCall(PetscMalloc1(end - start + 2, &globals)); 171 map->globals = globals; 172 for (i = 0; i < end - start + 1; i++) globals[i] = -1; 173 for (i = 0; i < n; i++) { 174 if (idx[i] < 0) continue; 175 globals[idx[i] - start] = i; 176 } 177 mapping->data = (void *)map; 178 PetscFunctionReturn(0); 179 } 180 181 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping) { 182 PetscInt i, *idx = mapping->indices, n = mapping->n; 183 ISLocalToGlobalMapping_Hash *map; 184 185 PetscFunctionBegin; 186 PetscCall(PetscNew(&map)); 187 PetscCall(PetscHMapICreate(&map->globalht)); 188 for (i = 0; i < n; i++) { 189 if (idx[i] < 0) continue; 190 PetscCall(PetscHMapISet(map->globalht, idx[i], i)); 191 } 192 mapping->data = (void *)map; 193 PetscFunctionReturn(0); 194 } 195 196 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping) { 197 ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data; 198 199 PetscFunctionBegin; 200 if (!map) PetscFunctionReturn(0); 201 PetscCall(PetscFree(map->globals)); 202 PetscCall(PetscFree(mapping->data)); 203 PetscFunctionReturn(0); 204 } 205 206 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping) { 207 ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data; 208 209 PetscFunctionBegin; 210 if (!map) PetscFunctionReturn(0); 211 PetscCall(PetscHMapIDestroy(&map->globalht)); 212 PetscCall(PetscFree(mapping->data)); 213 PetscFunctionReturn(0); 214 } 215 216 #define GTOLTYPE _Basic 217 #define GTOLNAME _Basic 218 #define GTOLBS mapping->bs 219 #define GTOL(g, local) \ 220 do { \ 221 local = map->globals[g / bs - start]; \ 222 if (local >= 0) local = bs * local + (g % bs); \ 223 } while (0) 224 225 #include 
<../src/vec/is/utils/isltog.h> 226 227 #define GTOLTYPE _Basic 228 #define GTOLNAME Block_Basic 229 #define GTOLBS 1 230 #define GTOL(g, local) \ 231 do { local = map->globals[g - start]; } while (0) 232 #include <../src/vec/is/utils/isltog.h> 233 234 #define GTOLTYPE _Hash 235 #define GTOLNAME _Hash 236 #define GTOLBS mapping->bs 237 #define GTOL(g, local) \ 238 do { \ 239 (void)PetscHMapIGet(map->globalht, g / bs, &local); \ 240 if (local >= 0) local = bs * local + (g % bs); \ 241 } while (0) 242 #include <../src/vec/is/utils/isltog.h> 243 244 #define GTOLTYPE _Hash 245 #define GTOLNAME Block_Hash 246 #define GTOLBS 1 247 #define GTOL(g, local) \ 248 do { (void)PetscHMapIGet(map->globalht, g, &local); } while (0) 249 #include <../src/vec/is/utils/isltog.h> 250 251 /*@ 252 ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object 253 254 Not Collective 255 256 Input Parameter: 257 . ltog - local to global mapping 258 259 Output Parameter: 260 . nltog - the duplicated local to global mapping 261 262 Level: advanced 263 264 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 265 @*/ 266 PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog) { 267 ISLocalToGlobalMappingType l2gtype; 268 269 PetscFunctionBegin; 270 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 271 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog)); 272 PetscCall(ISLocalToGlobalMappingGetType(ltog, &l2gtype)); 273 PetscCall(ISLocalToGlobalMappingSetType(*nltog, l2gtype)); 274 PetscFunctionReturn(0); 275 } 276 277 /*@ 278 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 279 280 Not Collective 281 282 Input Parameter: 283 . ltog - local to global mapping 284 285 Output Parameter: 286 . 
n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length 287 288 Level: advanced 289 290 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 291 @*/ 292 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n) { 293 PetscFunctionBegin; 294 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 295 PetscValidIntPointer(n, 2); 296 *n = mapping->bs * mapping->n; 297 PetscFunctionReturn(0); 298 } 299 300 /*@C 301 ISLocalToGlobalMappingViewFromOptions - View from Options 302 303 Collective on ISLocalToGlobalMapping 304 305 Input Parameters: 306 + A - the local to global mapping object 307 . obj - Optional object 308 - name - command line option 309 310 Level: intermediate 311 .seealso: `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()` 312 @*/ 313 PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[]) { 314 PetscFunctionBegin; 315 PetscValidHeaderSpecific(A, IS_LTOGM_CLASSID, 1); 316 PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name)); 317 PetscFunctionReturn(0); 318 } 319 320 /*@C 321 ISLocalToGlobalMappingView - View a local to global mapping 322 323 Not Collective 324 325 Input Parameters: 326 + ltog - local to global mapping 327 - viewer - viewer 328 329 Level: advanced 330 331 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 332 @*/ 333 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer) { 334 PetscInt i; 335 PetscMPIInt rank; 336 PetscBool iascii; 337 338 PetscFunctionBegin; 339 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 340 if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer)); 341 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 342 343 
PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank)); 344 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 345 if (iascii) { 346 PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer)); 347 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 348 for (i = 0; i < mapping->n; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, mapping->indices[i])); 349 PetscCall(PetscViewerFlush(viewer)); 350 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 351 } 352 PetscFunctionReturn(0); 353 } 354 355 /*@ 356 ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n) 357 ordering and a global parallel ordering. 358 359 Not collective 360 361 Input Parameter: 362 . is - index set containing the global numbers for each local number 363 364 Output Parameter: 365 . mapping - new mapping data structure 366 367 Notes: 368 the block size of the IS determines the block size of the mapping 369 Level: advanced 370 371 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()` 372 @*/ 373 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping) { 374 PetscInt n, bs; 375 const PetscInt *indices; 376 MPI_Comm comm; 377 PetscBool isblock; 378 379 PetscFunctionBegin; 380 PetscValidHeaderSpecific(is, IS_CLASSID, 1); 381 PetscValidPointer(mapping, 2); 382 383 PetscCall(PetscObjectGetComm((PetscObject)is, &comm)); 384 PetscCall(ISGetLocalSize(is, &n)); 385 PetscCall(PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock)); 386 if (!isblock) { 387 PetscCall(ISGetIndices(is, &indices)); 388 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping)); 389 PetscCall(ISRestoreIndices(is, &indices)); 390 } else { 391 PetscCall(ISGetBlockSize(is, &bs)); 392 PetscCall(ISBlockGetIndices(is, &indices)); 393 
PetscCall(ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping)); 394 PetscCall(ISBlockRestoreIndices(is, &indices)); 395 } 396 PetscFunctionReturn(0); 397 } 398 399 /*@C 400 ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) 401 ordering and a global parallel ordering. 402 403 Collective 404 405 Input Parameters: 406 + sf - star forest mapping contiguous local indices to (rank, offset) 407 - start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically 408 409 Output Parameter: 410 . mapping - new mapping data structure 411 412 Level: advanced 413 414 Notes: 415 If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang. 416 417 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()` 418 @*/ 419 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping) { 420 PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog; 421 MPI_Comm comm; 422 423 PetscFunctionBegin; 424 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 425 PetscValidPointer(mapping, 3); 426 PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); 427 PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL)); 428 if (start == PETSC_DECIDE) { 429 start = 0; 430 PetscCallMPI(MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm)); 431 } else PetscCheck(start >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE"); 432 PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal)); 433 ++maxlocal; 434 PetscCall(PetscMalloc1(nroots, &globals)); 435 PetscCall(PetscMalloc1(maxlocal, <og)); 436 for (i = 0; i < nroots; i++) globals[i] = start + i; 437 for (i = 0; i < maxlocal; i++) ltog[i] = -1; 438 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE)); 439 
PetscCall(PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE)); 440 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping)); 441 PetscCall(PetscFree(globals)); 442 PetscFunctionReturn(0); 443 } 444 445 /*@ 446 ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping 447 448 Not collective 449 450 Input Parameters: 451 + mapping - mapping data structure 452 - bs - the blocksize 453 454 Level: advanced 455 456 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()` 457 @*/ 458 PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs) { 459 PetscInt *nid; 460 const PetscInt *oid; 461 PetscInt i, cn, on, obs, nn; 462 463 PetscFunctionBegin; 464 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 465 PetscCheck(bs >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid block size %" PetscInt_FMT, bs); 466 if (bs == mapping->bs) PetscFunctionReturn(0); 467 on = mapping->n; 468 obs = mapping->bs; 469 oid = mapping->indices; 470 nn = (on * obs) / bs; 471 PetscCheck((on * obs) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT, bs, obs, on); 472 473 PetscCall(PetscMalloc1(nn, &nid)); 474 PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &oid)); 475 for (i = 0; i < nn; i++) { 476 PetscInt j; 477 for (j = 0, cn = 0; j < bs - 1; j++) { 478 if (oid[i * bs + j] < 0) { 479 cn++; 480 continue; 481 } 482 PetscCheck(oid[i * bs + j] == oid[i * bs + j + 1] - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT, bs, obs, oid[i * bs + j], oid[i * bs + j + 1]); 483 } 484 if (oid[i * bs + j] < 0) cn++; 485 if (cn) { 486 PetscCheck(cn == bs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and 
%" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT, bs, obs, cn); 487 nid[i] = -1; 488 } else { 489 nid[i] = oid[i * bs] / bs; 490 } 491 } 492 PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &oid)); 493 494 mapping->n = nn; 495 mapping->bs = bs; 496 PetscCall(PetscFree(mapping->indices)); 497 mapping->indices = nid; 498 mapping->globalstart = 0; 499 mapping->globalend = 0; 500 501 /* reset the cached information */ 502 PetscCall(PetscFree(mapping->info_procs)); 503 PetscCall(PetscFree(mapping->info_numprocs)); 504 if (mapping->info_indices) { 505 PetscInt i; 506 507 PetscCall(PetscFree((mapping->info_indices)[0])); 508 for (i = 1; i < mapping->info_nproc; i++) PetscCall(PetscFree(mapping->info_indices[i])); 509 PetscCall(PetscFree(mapping->info_indices)); 510 } 511 mapping->info_cached = PETSC_FALSE; 512 513 PetscTryTypeMethod(mapping, destroy); 514 PetscFunctionReturn(0); 515 } 516 517 /*@ 518 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 519 ordering and a global parallel ordering. 520 521 Not Collective 522 523 Input Parameters: 524 . mapping - mapping data structure 525 526 Output Parameter: 527 . bs - the blocksize 528 529 Level: advanced 530 531 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()` 532 @*/ 533 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs) { 534 PetscFunctionBegin; 535 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 536 *bs = mapping->bs; 537 PetscFunctionReturn(0); 538 } 539 540 /*@ 541 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 542 ordering and a global parallel ordering. 543 544 Not Collective, but communicator may have more than one process 545 546 Input Parameters: 547 + comm - MPI communicator 548 . bs - the block size 549 . 
n - the number of local elements divided by the block size, or equivalently the number of block indices 550 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 551 - mode - see PetscCopyMode 552 553 Output Parameter: 554 . mapping - new mapping data structure 555 556 Notes: 557 There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 558 559 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 560 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 561 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 
562 563 Level: advanced 564 565 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH` 566 `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType` 567 @*/ 568 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping) { 569 PetscInt *in; 570 571 PetscFunctionBegin; 572 if (n) PetscValidIntPointer(indices, 4); 573 PetscValidPointer(mapping, 6); 574 575 *mapping = NULL; 576 PetscCall(ISInitializePackage()); 577 578 PetscCall(PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView)); 579 (*mapping)->n = n; 580 (*mapping)->bs = bs; 581 if (mode == PETSC_COPY_VALUES) { 582 PetscCall(PetscMalloc1(n, &in)); 583 PetscCall(PetscArraycpy(in, indices, n)); 584 (*mapping)->indices = in; 585 (*mapping)->dealloc_indices = PETSC_TRUE; 586 } else if (mode == PETSC_OWN_POINTER) { 587 (*mapping)->indices = (PetscInt *)indices; 588 (*mapping)->dealloc_indices = PETSC_TRUE; 589 } else if (mode == PETSC_USE_POINTER) { 590 (*mapping)->indices = (PetscInt *)indices; 591 } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode); 592 PetscFunctionReturn(0); 593 } 594 595 PetscFunctionList ISLocalToGlobalMappingList = NULL; 596 597 /*@ 598 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 599 600 Not collective 601 602 Input Parameters: 603 . 
mapping - mapping data structure 604 605 Level: advanced 606 607 @*/ 608 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) { 609 char type[256]; 610 ISLocalToGlobalMappingType defaulttype = "Not set"; 611 PetscBool flg; 612 613 PetscFunctionBegin; 614 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 615 PetscCall(ISLocalToGlobalMappingRegisterAll()); 616 PetscObjectOptionsBegin((PetscObject)mapping); 617 PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg)); 618 if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping, type)); 619 PetscOptionsEnd(); 620 PetscFunctionReturn(0); 621 } 622 623 /*@ 624 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 625 ordering and a global parallel ordering. 626 627 Note Collective 628 629 Input Parameters: 630 . 
mapping - mapping data structure 631 632 Level: advanced 633 634 .seealso: `ISLocalToGlobalMappingCreate()` 635 @*/ 636 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) { 637 PetscFunctionBegin; 638 if (!*mapping) PetscFunctionReturn(0); 639 PetscValidHeaderSpecific((*mapping), IS_LTOGM_CLASSID, 1); 640 if (--((PetscObject)(*mapping))->refct > 0) { 641 *mapping = NULL; 642 PetscFunctionReturn(0); 643 } 644 if ((*mapping)->dealloc_indices) PetscCall(PetscFree((*mapping)->indices)); 645 PetscCall(PetscFree((*mapping)->info_procs)); 646 PetscCall(PetscFree((*mapping)->info_numprocs)); 647 if ((*mapping)->info_indices) { 648 PetscInt i; 649 650 PetscCall(PetscFree(((*mapping)->info_indices)[0])); 651 for (i = 1; i < (*mapping)->info_nproc; i++) PetscCall(PetscFree(((*mapping)->info_indices)[i])); 652 PetscCall(PetscFree((*mapping)->info_indices)); 653 } 654 if ((*mapping)->info_nodei) PetscCall(PetscFree(((*mapping)->info_nodei)[0])); 655 PetscCall(PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei)); 656 if ((*mapping)->ops->destroy) PetscCall((*(*mapping)->ops->destroy)(*mapping)); 657 PetscCall(PetscHeaderDestroy(mapping)); 658 *mapping = NULL; 659 PetscFunctionReturn(0); 660 } 661 662 /*@ 663 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 664 a new index set using the global numbering defined in an ISLocalToGlobalMapping 665 context. 666 667 Collective on is 668 669 Input Parameters: 670 + mapping - mapping between local and global numbering 671 - is - index set in local numbering 672 673 Output Parameters: 674 . newis - index set in global numbering 675 676 Notes: 677 The output IS will have the same communicator of the input IS. 
678 679 Level: advanced 680 681 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 682 `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()` 683 @*/ 684 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis) { 685 PetscInt n, *idxout; 686 const PetscInt *idxin; 687 688 PetscFunctionBegin; 689 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 690 PetscValidHeaderSpecific(is, IS_CLASSID, 2); 691 PetscValidPointer(newis, 3); 692 693 PetscCall(ISGetLocalSize(is, &n)); 694 PetscCall(ISGetIndices(is, &idxin)); 695 PetscCall(PetscMalloc1(n, &idxout)); 696 PetscCall(ISLocalToGlobalMappingApply(mapping, n, idxin, idxout)); 697 PetscCall(ISRestoreIndices(is, &idxin)); 698 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis)); 699 PetscFunctionReturn(0); 700 } 701 702 /*@ 703 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 704 and converts them to the global numbering. 705 706 Not collective 707 708 Input Parameters: 709 + mapping - the local to global mapping context 710 . N - number of integers 711 - in - input indices in local numbering 712 713 Output Parameter: 714 . out - indices in global numbering 715 716 Notes: 717 The in and out array parameters may be identical. 
718 719 Level: advanced 720 721 .seealso: `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 722 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 723 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 724 725 @*/ 726 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) { 727 PetscInt i, bs, Nmax; 728 729 PetscFunctionBegin; 730 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 731 bs = mapping->bs; 732 Nmax = bs * mapping->n; 733 if (bs == 1) { 734 const PetscInt *idx = mapping->indices; 735 for (i = 0; i < N; i++) { 736 if (in[i] < 0) { 737 out[i] = in[i]; 738 continue; 739 } 740 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 741 out[i] = idx[in[i]]; 742 } 743 } else { 744 const PetscInt *idx = mapping->indices; 745 for (i = 0; i < N; i++) { 746 if (in[i] < 0) { 747 out[i] = in[i]; 748 continue; 749 } 750 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 751 out[i] = idx[in[i] / bs] * bs + (in[i] % bs); 752 } 753 } 754 PetscFunctionReturn(0); 755 } 756 757 /*@ 758 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 759 760 Not collective 761 762 Input Parameters: 763 + mapping - the local to global mapping context 764 . N - number of integers 765 - in - input indices in local block numbering 766 767 Output Parameter: 768 . out - indices in global block numbering 769 770 Notes: 771 The in and out array parameters may be identical. 
772 773 Example: 774 If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0 775 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 776 777 Level: advanced 778 779 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 780 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 781 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 782 783 @*/ 784 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) { 785 PetscInt i, Nmax; 786 const PetscInt *idx; 787 788 PetscFunctionBegin; 789 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 790 Nmax = mapping->n; 791 idx = mapping->indices; 792 for (i = 0; i < N; i++) { 793 if (in[i] < 0) { 794 out[i] = in[i]; 795 continue; 796 } 797 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 798 out[i] = idx[in[i]]; 799 } 800 PetscFunctionReturn(0); 801 } 802 803 /*@ 804 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 805 specified with a global numbering. 806 807 Not collective 808 809 Input Parameters: 810 + mapping - mapping between local and global numbering 811 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 812 IS_GTOLM_DROP - drops the indices with no local value from the output list 813 . n - number of global indices to map 814 - idx - global indices to map 815 816 Output Parameters: 817 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 818 - idxout - local index of each global index, one must pass in an array long enough 819 to hold all the indices. 
You can call ISGlobalToLocalMappingApply() with 820 idxout == NULL to determine the required length (returned in nout) 821 and then allocate the required space and call ISGlobalToLocalMappingApply() 822 a second time to set the values. 823 824 Notes: 825 Either nout or idxout may be NULL. idx and idxout may be identical. 826 827 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 828 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 829 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 830 831 Level: advanced 832 833 Developer Note: The manual page states that idx and idxout may be identical but the calling 834 sequence declares idx as const so it cannot be the same as idxout. 835 836 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, 837 `ISLocalToGlobalMappingDestroy()` 838 @*/ 839 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[]) { 840 PetscFunctionBegin; 841 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 842 if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping)); 843 PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout); 844 PetscFunctionReturn(0); 845 } 846 847 /*@ 848 ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering 849 a new index set using the local numbering defined in an ISLocalToGlobalMapping 850 context. 851 852 Not collective 853 854 Input Parameters: 855 + mapping - mapping between local and global numbering 856 . 
type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 857 IS_GTOLM_DROP - drops the indices with no local value from the output list 858 - is - index set in global numbering 859 860 Output Parameters: 861 . newis - index set in local numbering 862 863 Notes: 864 The output IS will be sequential, as it encodes a purely local operation 865 866 Level: advanced 867 868 .seealso: `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 869 `ISLocalToGlobalMappingDestroy()` 870 @*/ 871 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis) { 872 PetscInt n, nout, *idxout; 873 const PetscInt *idxin; 874 875 PetscFunctionBegin; 876 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 877 PetscValidHeaderSpecific(is, IS_CLASSID, 3); 878 PetscValidPointer(newis, 4); 879 880 PetscCall(ISGetLocalSize(is, &n)); 881 PetscCall(ISGetIndices(is, &idxin)); 882 if (type == IS_GTOLM_MASK) { 883 PetscCall(PetscMalloc1(n, &idxout)); 884 } else { 885 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL)); 886 PetscCall(PetscMalloc1(nout, &idxout)); 887 } 888 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout)); 889 PetscCall(ISRestoreIndices(is, &idxin)); 890 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis)); 891 PetscFunctionReturn(0); 892 } 893 894 /*@ 895 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 896 specified with a block global numbering. 897 898 Not collective 899 900 Input Parameters: 901 + mapping - mapping between local and global numbering 902 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 903 IS_GTOLM_DROP - drops the indices with no local value from the output list 904 . 
  n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
    this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
    Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
    sequence declares idx as const so it cannot be the same as idxout.

.seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the global-to-local lookup structure is built lazily on first use */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  /* dispatch to the type-specific (basic or hash) block implementation */
  PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout);
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
      each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_cached) {
    /* the neighbor information was computed (and saved) by a previous call; return the cached arrays */
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    /* first call: run the parallel rendezvous algorithm below, which also fills the cache */
    PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices));
  }
  PetscFunctionReturn(0);
}

/* Computes, via a three-phase MPI rendezvous, which ranks share which of this rank's local
   (block) indices. Results are cached on the mapping for reuse by later calls. */
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) {
  PetscMPIInt  size, rank, tag1, tag2, tag3, *len, *source, imdex;
  PetscInt     i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
  PetscInt    *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
  PetscInt     cnt, scale, *ownedsenders, *nownedsenders, rstart;
  PetscInt     node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
  PetscInt     first_procs, first_numprocs, *first_indices;
  MPI_Request *recv_waits, *send_waits;
  MPI_Status   recv_status, *send_status, *recv_statuses;
  MPI_Comm     comm;
  PetscBool    debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) {
    /* uniprocessor case: nothing is shared; cache an empty answer */
    *nproc = 0;
    *procs = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag3));

  /* Ng = one past the largest global index appearing locally, reduced over all ranks */
  for (i = 0; i < n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* partition [0,Ng) into contiguous chunks of width "scale"; rank r temporarily "globally owns" [r*scale, (r+1)*scale) */
  scale = Ng / size + 1;
  ng    = scale;
  if (rank == size - 1) ng = Ng - scale * (size - 1);
  ng     = PetscMax(1, ng);
  rstart = scale * rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2 * size, &nprocs));
  PetscCall(PetscArrayzero(nprocs, 2 * size));

  /* determine owners of each local node */
  PetscCall(PetscMalloc1(n, &owner));
  for (i = 0; i < n; i++) {
    proc                 = lindices[i] / scale; /* processor that globally owns this index */
    nprocs[2 * proc + 1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]             = proc;
    nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0;
  for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
  PetscCall(PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm, nprocs, &nmax, &nrecvs));
  PetscCall(PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs));
  PetscCall(PetscMalloc1(nrecvs + 1, &recv_waits));
  for (i = 0; i < nrecvs; i++) PetscCallMPI(MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i));

  /* pack messages containing lists of local nodes to owners; each entry is the pair (global index, local index) */
  PetscCall(PetscMalloc1(2 * n + 1, &sends));
  PetscCall(PetscMalloc1(size + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
  for (i = 0; i < n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* recompute starts[] since the packing loop above advanced them */
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends + 1, &send_waits));
  PetscCall(PetscMalloc1(nsends + 1, &dest));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[2 * i]) {
      PetscCallMPI(MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs + 1, &source));
  PetscCall(PetscMalloc1(nrecvs + 1, &len));
  cnt = nrecvs;
  PetscCall(PetscCalloc1(ng + 1, &nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex] / 2; /* each logical entry is a (global,local) pair */
    /* count how many local owners for each of my global owned indices */
    for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i = 0; i < ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm + 1, &ownedsenders));
  PetscCall(PetscMalloc1(ng + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i = 0; i < nrecvs; i++) {
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced while filling ownedsenders; rebuild it for printing */
    starts[0] = 0;
    for (i = 1; i < ng; i++) {
      if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
      else starts[i] = starts[i - 1];
    }
    for (i = 0; i < ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart));
        for (j = 0; j < nownedsenders[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j]));
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends, &send_status));
    PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2 + 1, &nprocs)); /* length of each message */
  for (i = 0; i < nrecvs; i++) {
    nprocs[i] = 1;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i = 0; i < nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt + 1, &sends2));
  PetscCall(PetscMalloc1(nsends2 + 1, &starts2));

  starts2[0] = 0;
  for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i = 0; i < nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
        sends2[starts2[i] + cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2 + 1, &lens2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &starts3));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i));

  /* send the message lengths */
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm));

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i = 0; i < nrecvs2 - 1; i++) {
    starts3[i + 1] = starts3[i] + lens2[i];
    nt += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2 - 1];

  PetscCall(PetscMalloc1(nt + 1, &recvs2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i));

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2 + 1, &send_waits));
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i));

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i = 0; i < nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j = 0; j < nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]));
        for (k = 0; k < recvs2[cnt + 1]; k++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k]));
        cnt += 2 + recvs2[cnt + 1];
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  PetscCall(PetscCalloc1(size, &nprocs));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  nt = 0;
  for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscCall(PetscMalloc1(nt + 1, procs));
  PetscCall(PetscMalloc1(nt + 1, numprocs));
  PetscCall(PetscMalloc1(nt + 1, indices));
  for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
  PetscCall(PetscMalloc1(size, &bprocs)); /* bprocs[rank] = compressed position of that rank in (*procs) */
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i], &(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs, nt));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i = 0; i < nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i], &tmp));
    for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i = 0; i < nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]));
      for (j = 0; j < (*numprocs)[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j]));
      PetscCall(PetscSynchronizedPrintf(comm, "\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2, &send_status));
    PetscCallMPI(MPI_Waitall(nsends2, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i = 0; i < *nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_free) {
    /* info_free is set by ISLocalToGlobalMappingGetInfo() when bs > 1: numprocs/indices were
       freshly allocated there (procs still belongs to the cached block info, so it is not freed) */
    PetscCall(PetscFree(*numprocs));
    if (*indices) {
      PetscInt i;

      PetscCall(PetscFree((*indices)[0]));
      for (i = 1; i < *nproc; i++) PetscCall(PetscFree((*indices)[i]));
      PetscCall(PetscFree(*indices));
    }
  }
  /* invalidate the caller's pointers; the cached copies remain owned by the mapping */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
      each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.
mapping - the mapping from local to global indexing 1419 1420 Output Parameters: 1421 + nproc - number of processors that are connected to this one 1422 . proc - neighboring processors 1423 . numproc - number of indices for each subdomain (processor) 1424 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 1425 1426 Level: advanced 1427 1428 Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed. 1429 1430 Fortran Usage: 1431 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 1432 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 1433 PetscInt indices[nproc][numprocmax],ierr) 1434 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 1435 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 
1436 1437 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`, 1438 `ISLocalToGlobalMappingRestoreInfo()` 1439 @*/ 1440 PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) { 1441 PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k; 1442 1443 PetscFunctionBegin; 1444 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 1445 bs = mapping->bs; 1446 PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices)); 1447 if (bs > 1) { /* we need to expand the cached info */ 1448 PetscCall(PetscCalloc1(*nproc, &*indices)); 1449 PetscCall(PetscCalloc1(*nproc, &*numprocs)); 1450 for (i = 0; i < *nproc; i++) { 1451 PetscCall(PetscMalloc1(bs * bnumprocs[i], &(*indices)[i])); 1452 for (j = 0; j < bnumprocs[i]; j++) { 1453 for (k = 0; k < bs; k++) (*indices)[i][j * bs + k] = bs * bindices[i][j] + k; 1454 } 1455 (*numprocs)[i] = bnumprocs[i] * bs; 1456 } 1457 mapping->info_free = PETSC_TRUE; 1458 } else { 1459 *numprocs = bnumprocs; 1460 *indices = bindices; 1461 } 1462 PetscFunctionReturn(0); 1463 } 1464 1465 /*@C 1466 ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo() 1467 1468 Collective on ISLocalToGlobalMapping 1469 1470 Input Parameter: 1471 . mapping - the mapping from local to global indexing 1472 1473 Output Parameters: 1474 + nproc - number of processors that are connected to this one 1475 . proc - neighboring processors 1476 . 
  numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) {
  PetscFunctionBegin;
  /* the point-level arrays are owned exactly as the block-level ones are; delegate the cleanup */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices));
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[]) {
  PetscInt n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
  if (!mapping->info_nodec) {
    /* first call: build and cache a CSR-like structure (info_nodec = per-node counts,
       info_nodei = per-node rank lists) from the per-neighbor shared-index info */
    PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;

    PetscCall(PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
    /* first pass: count ranks per node (every node is on this rank, hence the initial 1) */
    for (i = 0; i < n; i++) mapping->info_nodec[i] = 1;
    m                      = n;
    mapping->info_nodec[n] = 0;
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      m += n_shared[i];
      for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all rank lists; set up the per-node offsets */
    if (n) PetscCall(PetscMalloc1(m, &mapping->info_nodei[0]));
    for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
    /* second pass: reset the counts and fill the lists (entry 0 of neigh is this rank) */
    PetscCall(PetscArrayzero(mapping->info_nodec, n));
    for (i = 0; i < n; i++) {
      mapping->info_nodec[i]    = 1;
      mapping->info_nodei[i][0] = neigh[0];
    }
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      for (j = 0; j < n_shared[i]; j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates (a rank may share a node through several blocks) */
    for (i = 0; i < n; i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
  }
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the arrays remain cached on the mapping (freed at destroy time); only NULL out the caller's handles */
  if (nnodes) *nnodes = 0;
  if (count) *count = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.
array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize() 1593 1594 Level: advanced 1595 1596 Notes: 1597 ISLocalToGlobalMappingGetSize() returns the length the this array 1598 1599 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()` 1600 @*/ 1601 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) { 1602 PetscFunctionBegin; 1603 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1604 PetscValidPointer(array, 2); 1605 if (ltog->bs == 1) { 1606 *array = ltog->indices; 1607 } else { 1608 PetscInt *jj, k, i, j, n = ltog->n, bs = ltog->bs; 1609 const PetscInt *ii; 1610 1611 PetscCall(PetscMalloc1(bs * n, &jj)); 1612 *array = jj; 1613 k = 0; 1614 ii = ltog->indices; 1615 for (i = 0; i < n; i++) 1616 for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j; 1617 } 1618 PetscFunctionReturn(0); 1619 } 1620 1621 /*@C 1622 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices() 1623 1624 Not Collective 1625 1626 Input Parameters: 1627 + ltog - local to global mapping 1628 - array - array of indices 1629 1630 Level: advanced 1631 1632 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` 1633 @*/ 1634 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) { 1635 PetscFunctionBegin; 1636 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1637 PetscValidPointer(array, 2); 1638 PetscCheck(ltog->bs != 1 || *array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer"); 1639 1640 if (ltog->bs > 1) PetscCall(PetscFree(*(void **)array)); 1641 PetscFunctionReturn(0); 1642 } 1643 1644 /*@C 1645 ISLocalToGlobalMappingGetBlockIndices - Get global 
indices for every local block

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   array - array of indices

    Level: advanced

.seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscValidPointer(array, 2);
  /* block indices are returned by reference (no copy); the matching Restore checks for this exact pointer */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   array - array of indices

    Level: advanced

.seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscValidPointer(array, 2);
  /* Get aliased the internal array, so the caller must return the identical pointer */
  PetscCheck(*array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer");
  /* nothing was allocated, so nothing to free; just invalidate the caller's alias */
  *array = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

    Not Collective

    Input Parameters:
+   comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
.   n - number of mappings to concatenate
-   ltogs - local to global mappings

    Output Parameter:
.
ltogcat - new mapping 1701 1702 Note: this currently always returns a mapping with block size of 1 1703 1704 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1705 1706 Level: advanced 1707 1708 .seealso: `ISLocalToGlobalMappingCreate()` 1709 @*/ 1710 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat) { 1711 PetscInt i, cnt, m, *idx; 1712 1713 PetscFunctionBegin; 1714 PetscCheck(n >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "Must have a non-negative number of mappings, given %" PetscInt_FMT, n); 1715 if (n > 0) PetscValidPointer(ltogs, 3); 1716 for (i = 0; i < n; i++) PetscValidHeaderSpecific(ltogs[i], IS_LTOGM_CLASSID, 3); 1717 PetscValidPointer(ltogcat, 4); 1718 for (cnt = 0, i = 0; i < n; i++) { 1719 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1720 cnt += m; 1721 } 1722 PetscCall(PetscMalloc1(cnt, &idx)); 1723 for (cnt = 0, i = 0; i < n; i++) { 1724 const PetscInt *subidx; 1725 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1726 PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx)); 1727 PetscCall(PetscArraycpy(&idx[cnt], subidx, m)); 1728 PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx)); 1729 cnt += m; 1730 } 1731 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat)); 1732 PetscFunctionReturn(0); 1733 } 1734 1735 /*MC 1736 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1737 used this is good for only small and moderate size problems. 1738 1739 Options Database Keys: 1740 . 
-islocaltoglobalmapping_type basic - select this method

    Level: beginner

.seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog) {
  PetscFunctionBegin;
  /* Only the global-to-local direction (and destroy) is type-specific; install the
     Basic implementation's ops. Setup is deferred until a global-to-local call is made. */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}

/*MC
    ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
    used this is good for large memory problems.

    Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method

    Notes:
    This is selected automatically for large problems if the user does not set the type.
Level: beginner

.seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC`
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog) {
  PetscFunctionBegin;
  /* Install the hash-table based global-to-local ops; setup is deferred until a
     global-to-local call is made. */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRegister - Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

    Not Collective

    Input Parameters:
+   sname - name of a new method
-   function - routine to create method context

    Notes:
    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

    Sample usage:
.vb
    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
.ve

    Then, your mapping can be chosen with the procedural interface via
$   ISLocalToGlobalMappingSetType(ltog,"my_mapper")
    or at runtime via the option
$   -islocaltoglobalmapping_type my_mapper

    Level: advanced

.seealso: `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`

@*/
PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping)) {
  PetscFunctionBegin;
  /* make sure the registry (and class ids) exist before adding to the function list */
  PetscCall(ISInitializePackage());
  PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function));
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1814 1815 Logically Collective on ISLocalToGlobalMapping 1816 1817 Input Parameters: 1818 + ltog - the ISLocalToGlobalMapping object 1819 - type - a known method 1820 1821 Options Database Key: 1822 . -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list 1823 of available methods (for instance, basic or hash) 1824 1825 Notes: 1826 See "petsc/include/petscis.h" for available methods 1827 1828 Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and 1829 then set the ISLocalToGlobalMapping type from the options database rather than by using 1830 this routine. 1831 1832 Level: intermediate 1833 1834 Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they 1835 are accessed by ISLocalToGlobalMappingSetType(). 1836 1837 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()` 1838 @*/ 1839 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) { 1840 PetscBool match; 1841 PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL; 1842 1843 PetscFunctionBegin; 1844 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1845 if (type) PetscValidCharPointer(type, 2); 1846 1847 PetscCall(PetscObjectTypeCompare((PetscObject)ltog, type, &match)); 1848 if (match) PetscFunctionReturn(0); 1849 1850 /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */ 1851 if (type) { 1852 PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r)); 1853 PetscCheck(r, PetscObjectComm((PetscObject)ltog), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unable to find requested ISLocalToGlobalMapping type %s", type); 1854 } 1855 /* Destroy the previous private LTOG context */ 1856 PetscTryTypeMethod(ltog, destroy); 1857 ltog->ops->destroy = NULL; 1858 1859 PetscCall(PetscObjectChangeTypeName((PetscObject)ltog, type)); 1860 if (r) 
PetscCall((*r)(ltog)); 1861 PetscFunctionReturn(0); 1862 } 1863 1864 /*@C 1865 ISLocalToGlobalMappingGetType - Get the type of the l2g map 1866 1867 Not Collective 1868 1869 Input Parameter: 1870 . ltog - the ISLocalToGlobalMapping object 1871 1872 Output Parameter: 1873 . type - the type 1874 1875 Level: intermediate 1876 1877 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()` 1878 @*/ 1879 PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type) { 1880 PetscFunctionBegin; 1881 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1882 PetscValidPointer(type, 2); 1883 *type = ((PetscObject)ltog)->type_name; 1884 PetscFunctionReturn(0); 1885 } 1886 1887 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; 1888 1889 /*@C 1890 ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package. 1891 1892 Not Collective 1893 1894 Level: advanced 1895 1896 .seealso: `ISRegister()`, `ISLocalToGlobalRegister()` 1897 @*/ 1898 PetscErrorCode ISLocalToGlobalMappingRegisterAll(void) { 1899 PetscFunctionBegin; 1900 if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0); 1901 ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE; 1902 PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic)); 1903 PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash)); 1904 PetscFunctionReturn(0); 1905 } 1906