1 2 #include <petsc/private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petsc/private/hashmapi.h> 4 #include <petscsf.h> 5 #include <petscviewer.h> 6 7 PetscClassId IS_LTOGM_CLASSID; 8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***); 9 10 typedef struct { 11 PetscInt *globals; 12 } ISLocalToGlobalMapping_Basic; 13 14 typedef struct { 15 PetscHMapI globalht; 16 } ISLocalToGlobalMapping_Hash; 17 18 /*@C 19 ISGetPointRange - Returns a description of the points in an IS suitable for traversal 20 21 Not collective 22 23 Input Parameter: 24 . pointIS - The IS object 25 26 Output Parameters: 27 + pStart - The first index, see notes 28 . pEnd - One past the last index, see notes 29 - points - The indices, see notes 30 31 Notes: 32 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 33 $ ISGetPointRange(is, &pStart, &pEnd, &points); 34 $ for (p = pStart; p < pEnd; ++p) { 35 $ const PetscInt point = points ? 
points[p] : p; 36 $ } 37 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 38 39 Level: intermediate 40 41 .seealso: `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 42 @*/ 43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) { 44 PetscInt numCells, step = 1; 45 PetscBool isStride; 46 47 PetscFunctionBeginHot; 48 *pStart = 0; 49 *points = NULL; 50 PetscCall(ISGetLocalSize(pointIS, &numCells)); 51 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 52 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 53 *pEnd = *pStart + numCells; 54 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 55 PetscFunctionReturn(0); 56 } 57 58 /*@C 59 ISRestorePointRange - Destroys the traversal description 60 61 Not collective 62 63 Input Parameters: 64 + pointIS - The IS object 65 . pStart - The first index, from ISGetPointRange() 66 . pEnd - One past the last index, from ISGetPointRange() 67 - points - The indices, from ISGetPointRange() 68 69 Notes: 70 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 71 $ ISGetPointRange(is, &pStart, &pEnd, &points); 72 $ for (p = pStart; p < pEnd; ++p) { 73 $ const PetscInt point = points ? 
points[p] : p; 74 $ } 75 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 76 77 Level: intermediate 78 79 .seealso: `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 80 @*/ 81 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) { 82 PetscInt step = 1; 83 PetscBool isStride; 84 85 PetscFunctionBeginHot; 86 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 87 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 88 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 89 PetscFunctionReturn(0); 90 } 91 92 /*@C 93 ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given 94 95 Not collective 96 97 Input Parameters: 98 + subpointIS - The IS object to be configured 99 . pStar t - The first index of the subrange 100 . pEnd - One past the last index for the subrange 101 - points - The indices for the entire range, from ISGetPointRange() 102 103 Output Parameters: 104 . subpointIS - The IS object now configured to be a subrange 105 106 Notes: 107 The input IS will now respond properly to calls to ISGetPointRange() and return the subrange. 
108 109 Level: intermediate 110 111 .seealso: `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()` 112 @*/ 113 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points) { 114 PetscFunctionBeginHot; 115 if (points) { 116 PetscCall(ISSetType(subpointIS, ISGENERAL)); 117 PetscCall(ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER)); 118 } else { 119 PetscCall(ISSetType(subpointIS, ISSTRIDE)); 120 PetscCall(ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1)); 121 } 122 PetscFunctionReturn(0); 123 } 124 125 /* -----------------------------------------------------------------------------------------*/ 126 127 /* 128 Creates the global mapping information in the ISLocalToGlobalMapping structure 129 130 If the user has not selected how to handle the global to local mapping then use HASH for "large" problems 131 */ 132 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping) { 133 PetscInt i, *idx = mapping->indices, n = mapping->n, end, start; 134 135 PetscFunctionBegin; 136 if (mapping->data) PetscFunctionReturn(0); 137 end = 0; 138 start = PETSC_MAX_INT; 139 140 for (i = 0; i < n; i++) { 141 if (idx[i] < 0) continue; 142 if (idx[i] < start) start = idx[i]; 143 if (idx[i] > end) end = idx[i]; 144 } 145 if (start > end) { 146 start = 0; 147 end = -1; 148 } 149 mapping->globalstart = start; 150 mapping->globalend = end; 151 if (!((PetscObject)mapping)->type_name) { 152 if ((end - start) > PetscMax(4 * n, 1000000)) { 153 PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH)); 154 } else { 155 PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC)); 156 } 157 } 158 PetscTryTypeMethod(mapping, globaltolocalmappingsetup); 159 PetscFunctionReturn(0); 160 } 161 162 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping) { 163 PetscInt i, *idx = mapping->indices, n = 
mapping->n, end, start, *globals; 164 ISLocalToGlobalMapping_Basic *map; 165 166 PetscFunctionBegin; 167 start = mapping->globalstart; 168 end = mapping->globalend; 169 PetscCall(PetscNew(&map)); 170 PetscCall(PetscMalloc1(end - start + 2, &globals)); 171 map->globals = globals; 172 for (i = 0; i < end - start + 1; i++) globals[i] = -1; 173 for (i = 0; i < n; i++) { 174 if (idx[i] < 0) continue; 175 globals[idx[i] - start] = i; 176 } 177 mapping->data = (void *)map; 178 PetscCall(PetscLogObjectMemory((PetscObject)mapping, (end - start + 1) * sizeof(PetscInt))); 179 PetscFunctionReturn(0); 180 } 181 182 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping) { 183 PetscInt i, *idx = mapping->indices, n = mapping->n; 184 ISLocalToGlobalMapping_Hash *map; 185 186 PetscFunctionBegin; 187 PetscCall(PetscNew(&map)); 188 PetscCall(PetscHMapICreate(&map->globalht)); 189 for (i = 0; i < n; i++) { 190 if (idx[i] < 0) continue; 191 PetscCall(PetscHMapISet(map->globalht, idx[i], i)); 192 } 193 mapping->data = (void *)map; 194 PetscCall(PetscLogObjectMemory((PetscObject)mapping, 2 * n * sizeof(PetscInt))); 195 PetscFunctionReturn(0); 196 } 197 198 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping) { 199 ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data; 200 201 PetscFunctionBegin; 202 if (!map) PetscFunctionReturn(0); 203 PetscCall(PetscFree(map->globals)); 204 PetscCall(PetscFree(mapping->data)); 205 PetscFunctionReturn(0); 206 } 207 208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping) { 209 ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data; 210 211 PetscFunctionBegin; 212 if (!map) PetscFunctionReturn(0); 213 PetscCall(PetscHMapIDestroy(&map->globalht)); 214 PetscCall(PetscFree(mapping->data)); 215 PetscFunctionReturn(0); 216 } 217 218 #define GTOLTYPE _Basic 219 #define GTOLNAME _Basic 220 #define GTOLBS 
mapping->bs 221 #define GTOL(g, local) \ 222 do { \ 223 local = map->globals[g / bs - start]; \ 224 if (local >= 0) local = bs * local + (g % bs); \ 225 } while (0) 226 227 #include <../src/vec/is/utils/isltog.h> 228 229 #define GTOLTYPE _Basic 230 #define GTOLNAME Block_Basic 231 #define GTOLBS 1 232 #define GTOL(g, local) \ 233 do { local = map->globals[g - start]; } while (0) 234 #include <../src/vec/is/utils/isltog.h> 235 236 #define GTOLTYPE _Hash 237 #define GTOLNAME _Hash 238 #define GTOLBS mapping->bs 239 #define GTOL(g, local) \ 240 do { \ 241 (void)PetscHMapIGet(map->globalht, g / bs, &local); \ 242 if (local >= 0) local = bs * local + (g % bs); \ 243 } while (0) 244 #include <../src/vec/is/utils/isltog.h> 245 246 #define GTOLTYPE _Hash 247 #define GTOLNAME Block_Hash 248 #define GTOLBS 1 249 #define GTOL(g, local) \ 250 do { (void)PetscHMapIGet(map->globalht, g, &local); } while (0) 251 #include <../src/vec/is/utils/isltog.h> 252 253 /*@ 254 ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object 255 256 Not Collective 257 258 Input Parameter: 259 . ltog - local to global mapping 260 261 Output Parameter: 262 . 
nltog - the duplicated local to global mapping 263 264 Level: advanced 265 266 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 267 @*/ 268 PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog) { 269 ISLocalToGlobalMappingType l2gtype; 270 271 PetscFunctionBegin; 272 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 273 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog)); 274 PetscCall(ISLocalToGlobalMappingGetType(ltog, &l2gtype)); 275 PetscCall(ISLocalToGlobalMappingSetType(*nltog, l2gtype)); 276 PetscFunctionReturn(0); 277 } 278 279 /*@ 280 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 281 282 Not Collective 283 284 Input Parameter: 285 . ltog - local to global mapping 286 287 Output Parameter: 288 . n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length 289 290 Level: advanced 291 292 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 293 @*/ 294 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n) { 295 PetscFunctionBegin; 296 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 297 PetscValidIntPointer(n, 2); 298 *n = mapping->bs * mapping->n; 299 PetscFunctionReturn(0); 300 } 301 302 /*@C 303 ISLocalToGlobalMappingViewFromOptions - View from Options 304 305 Collective on ISLocalToGlobalMapping 306 307 Input Parameters: 308 + A - the local to global mapping object 309 . 
obj - Optional object 310 - name - command line option 311 312 Level: intermediate 313 .seealso: `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()` 314 @*/ 315 PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[]) { 316 PetscFunctionBegin; 317 PetscValidHeaderSpecific(A, IS_LTOGM_CLASSID, 1); 318 PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name)); 319 PetscFunctionReturn(0); 320 } 321 322 /*@C 323 ISLocalToGlobalMappingView - View a local to global mapping 324 325 Not Collective 326 327 Input Parameters: 328 + ltog - local to global mapping 329 - viewer - viewer 330 331 Level: advanced 332 333 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 334 @*/ 335 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer) { 336 PetscInt i; 337 PetscMPIInt rank; 338 PetscBool iascii; 339 340 PetscFunctionBegin; 341 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 342 if (!viewer) { PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer)); } 343 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 344 345 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank)); 346 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 347 if (iascii) { 348 PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer)); 349 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 350 for (i = 0; i < mapping->n; i++) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, mapping->indices[i])); } 351 PetscCall(PetscViewerFlush(viewer)); 352 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 353 } 354 PetscFunctionReturn(0); 355 } 356 357 /*@ 358 ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n) 359 ordering and a 
global parallel ordering. 360 361 Not collective 362 363 Input Parameter: 364 . is - index set containing the global numbers for each local number 365 366 Output Parameter: 367 . mapping - new mapping data structure 368 369 Notes: 370 the block size of the IS determines the block size of the mapping 371 Level: advanced 372 373 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()` 374 @*/ 375 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping) { 376 PetscInt n, bs; 377 const PetscInt *indices; 378 MPI_Comm comm; 379 PetscBool isblock; 380 381 PetscFunctionBegin; 382 PetscValidHeaderSpecific(is, IS_CLASSID, 1); 383 PetscValidPointer(mapping, 2); 384 385 PetscCall(PetscObjectGetComm((PetscObject)is, &comm)); 386 PetscCall(ISGetLocalSize(is, &n)); 387 PetscCall(PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock)); 388 if (!isblock) { 389 PetscCall(ISGetIndices(is, &indices)); 390 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping)); 391 PetscCall(ISRestoreIndices(is, &indices)); 392 } else { 393 PetscCall(ISGetBlockSize(is, &bs)); 394 PetscCall(ISBlockGetIndices(is, &indices)); 395 PetscCall(ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping)); 396 PetscCall(ISBlockRestoreIndices(is, &indices)); 397 } 398 PetscFunctionReturn(0); 399 } 400 401 /*@C 402 ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) 403 ordering and a global parallel ordering. 404 405 Collective 406 407 Input Parameters: 408 + sf - star forest mapping contiguous local indices to (rank, offset) 409 - start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically 410 411 Output Parameter: 412 . 
mapping - new mapping data structure 413 414 Level: advanced 415 416 Notes: 417 If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang. 418 419 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()` 420 @*/ 421 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping) { 422 PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog; 423 MPI_Comm comm; 424 425 PetscFunctionBegin; 426 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 427 PetscValidPointer(mapping, 3); 428 PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); 429 PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL)); 430 if (start == PETSC_DECIDE) { 431 start = 0; 432 PetscCallMPI(MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm)); 433 } else PetscCheck(start >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE"); 434 PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal)); 435 ++maxlocal; 436 PetscCall(PetscMalloc1(nroots, &globals)); 437 PetscCall(PetscMalloc1(maxlocal, <og)); 438 for (i = 0; i < nroots; i++) globals[i] = start + i; 439 for (i = 0; i < maxlocal; i++) ltog[i] = -1; 440 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE)); 441 PetscCall(PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE)); 442 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping)); 443 PetscCall(PetscFree(globals)); 444 PetscFunctionReturn(0); 445 } 446 447 /*@ 448 ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping 449 450 Not collective 451 452 Input Parameters: 453 + mapping - mapping data structure 454 - bs - the blocksize 455 456 Level: advanced 457 458 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()` 459 @*/ 460 PetscErrorCode 
ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs) { 461 PetscInt *nid; 462 const PetscInt *oid; 463 PetscInt i, cn, on, obs, nn; 464 465 PetscFunctionBegin; 466 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 467 PetscCheck(bs >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid block size %" PetscInt_FMT, bs); 468 if (bs == mapping->bs) PetscFunctionReturn(0); 469 on = mapping->n; 470 obs = mapping->bs; 471 oid = mapping->indices; 472 nn = (on * obs) / bs; 473 PetscCheck((on * obs) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT, bs, obs, on); 474 475 PetscCall(PetscMalloc1(nn, &nid)); 476 PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &oid)); 477 for (i = 0; i < nn; i++) { 478 PetscInt j; 479 for (j = 0, cn = 0; j < bs - 1; j++) { 480 if (oid[i * bs + j] < 0) { 481 cn++; 482 continue; 483 } 484 PetscCheck(oid[i * bs + j] == oid[i * bs + j + 1] - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT, bs, obs, oid[i * bs + j], oid[i * bs + j + 1]); 485 } 486 if (oid[i * bs + j] < 0) cn++; 487 if (cn) { 488 PetscCheck(cn == bs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT, bs, obs, cn); 489 nid[i] = -1; 490 } else { 491 nid[i] = oid[i * bs] / bs; 492 } 493 } 494 PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &oid)); 495 496 mapping->n = nn; 497 mapping->bs = bs; 498 PetscCall(PetscFree(mapping->indices)); 499 mapping->indices = nid; 500 mapping->globalstart = 0; 501 mapping->globalend = 0; 502 503 /* reset the cached information */ 504 PetscCall(PetscFree(mapping->info_procs)); 505 
PetscCall(PetscFree(mapping->info_numprocs)); 506 if (mapping->info_indices) { 507 PetscInt i; 508 509 PetscCall(PetscFree((mapping->info_indices)[0])); 510 for (i = 1; i < mapping->info_nproc; i++) { PetscCall(PetscFree(mapping->info_indices[i])); } 511 PetscCall(PetscFree(mapping->info_indices)); 512 } 513 mapping->info_cached = PETSC_FALSE; 514 515 PetscTryTypeMethod(mapping, destroy); 516 PetscFunctionReturn(0); 517 } 518 519 /*@ 520 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 521 ordering and a global parallel ordering. 522 523 Not Collective 524 525 Input Parameters: 526 . mapping - mapping data structure 527 528 Output Parameter: 529 . bs - the blocksize 530 531 Level: advanced 532 533 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()` 534 @*/ 535 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs) { 536 PetscFunctionBegin; 537 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 538 *bs = mapping->bs; 539 PetscFunctionReturn(0); 540 } 541 542 /*@ 543 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 544 ordering and a global parallel ordering. 545 546 Not Collective, but communicator may have more than one process 547 548 Input Parameters: 549 + comm - MPI communicator 550 . bs - the block size 551 . n - the number of local elements divided by the block size, or equivalently the number of block indices 552 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 553 - mode - see PetscCopyMode 554 555 Output Parameter: 556 . 
mapping - new mapping data structure 557 558 Notes: 559 There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 560 561 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 562 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 563 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 564 565 Level: advanced 566 567 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH` 568 `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType` 569 @*/ 570 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping) { 571 PetscInt *in; 572 573 PetscFunctionBegin; 574 if (n) PetscValidIntPointer(indices, 4); 575 PetscValidPointer(mapping, 6); 576 577 *mapping = NULL; 578 PetscCall(ISInitializePackage()); 579 580 PetscCall(PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView)); 581 (*mapping)->n = n; 582 (*mapping)->bs = bs; 583 if (mode == PETSC_COPY_VALUES) { 584 PetscCall(PetscMalloc1(n, &in)); 585 PetscCall(PetscArraycpy(in, indices, n)); 586 (*mapping)->indices = in; 587 (*mapping)->dealloc_indices = PETSC_TRUE; 588 PetscCall(PetscLogObjectMemory((PetscObject)*mapping, n * sizeof(PetscInt))); 589 } else if (mode == PETSC_OWN_POINTER) { 590 (*mapping)->indices = (PetscInt *)indices; 591 (*mapping)->dealloc_indices = 
PETSC_TRUE; 592 PetscCall(PetscLogObjectMemory((PetscObject)*mapping, n * sizeof(PetscInt))); 593 } else if (mode == PETSC_USE_POINTER) { 594 (*mapping)->indices = (PetscInt *)indices; 595 } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode); 596 PetscFunctionReturn(0); 597 } 598 599 PetscFunctionList ISLocalToGlobalMappingList = NULL; 600 601 /*@ 602 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 603 604 Not collective 605 606 Input Parameters: 607 . mapping - mapping data structure 608 609 Level: advanced 610 611 @*/ 612 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) { 613 char type[256]; 614 ISLocalToGlobalMappingType defaulttype = "Not set"; 615 PetscBool flg; 616 617 PetscFunctionBegin; 618 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 619 PetscCall(ISLocalToGlobalMappingRegisterAll()); 620 PetscObjectOptionsBegin((PetscObject)mapping); 621 PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg)); 622 if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping, type)); 623 PetscOptionsEnd(); 624 PetscFunctionReturn(0); 625 } 626 627 /*@ 628 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 629 ordering and a global parallel ordering. 630 631 Note Collective 632 633 Input Parameters: 634 . 
mapping - mapping data structure 635 636 Level: advanced 637 638 .seealso: `ISLocalToGlobalMappingCreate()` 639 @*/ 640 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) { 641 PetscFunctionBegin; 642 if (!*mapping) PetscFunctionReturn(0); 643 PetscValidHeaderSpecific((*mapping), IS_LTOGM_CLASSID, 1); 644 if (--((PetscObject)(*mapping))->refct > 0) { 645 *mapping = NULL; 646 PetscFunctionReturn(0); 647 } 648 if ((*mapping)->dealloc_indices) { PetscCall(PetscFree((*mapping)->indices)); } 649 PetscCall(PetscFree((*mapping)->info_procs)); 650 PetscCall(PetscFree((*mapping)->info_numprocs)); 651 if ((*mapping)->info_indices) { 652 PetscInt i; 653 654 PetscCall(PetscFree(((*mapping)->info_indices)[0])); 655 for (i = 1; i < (*mapping)->info_nproc; i++) { PetscCall(PetscFree(((*mapping)->info_indices)[i])); } 656 PetscCall(PetscFree((*mapping)->info_indices)); 657 } 658 if ((*mapping)->info_nodei) { PetscCall(PetscFree(((*mapping)->info_nodei)[0])); } 659 PetscCall(PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei)); 660 if ((*mapping)->ops->destroy) { PetscCall((*(*mapping)->ops->destroy)(*mapping)); } 661 PetscCall(PetscHeaderDestroy(mapping)); 662 *mapping = NULL; 663 PetscFunctionReturn(0); 664 } 665 666 /*@ 667 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 668 a new index set using the global numbering defined in an ISLocalToGlobalMapping 669 context. 670 671 Collective on is 672 673 Input Parameters: 674 + mapping - mapping between local and global numbering 675 - is - index set in local numbering 676 677 Output Parameters: 678 . newis - index set in global numbering 679 680 Notes: 681 The output IS will have the same communicator of the input IS. 
682 683 Level: advanced 684 685 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 686 `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()` 687 @*/ 688 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis) { 689 PetscInt n, *idxout; 690 const PetscInt *idxin; 691 692 PetscFunctionBegin; 693 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 694 PetscValidHeaderSpecific(is, IS_CLASSID, 2); 695 PetscValidPointer(newis, 3); 696 697 PetscCall(ISGetLocalSize(is, &n)); 698 PetscCall(ISGetIndices(is, &idxin)); 699 PetscCall(PetscMalloc1(n, &idxout)); 700 PetscCall(ISLocalToGlobalMappingApply(mapping, n, idxin, idxout)); 701 PetscCall(ISRestoreIndices(is, &idxin)); 702 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis)); 703 PetscFunctionReturn(0); 704 } 705 706 /*@ 707 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 708 and converts them to the global numbering. 709 710 Not collective 711 712 Input Parameters: 713 + mapping - the local to global mapping context 714 . N - number of integers 715 - in - input indices in local numbering 716 717 Output Parameter: 718 . out - indices in global numbering 719 720 Notes: 721 The in and out array parameters may be identical. 
722 723 Level: advanced 724 725 .seealso: `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 726 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 727 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 728 729 @*/ 730 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) { 731 PetscInt i, bs, Nmax; 732 733 PetscFunctionBegin; 734 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 735 bs = mapping->bs; 736 Nmax = bs * mapping->n; 737 if (bs == 1) { 738 const PetscInt *idx = mapping->indices; 739 for (i = 0; i < N; i++) { 740 if (in[i] < 0) { 741 out[i] = in[i]; 742 continue; 743 } 744 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 745 out[i] = idx[in[i]]; 746 } 747 } else { 748 const PetscInt *idx = mapping->indices; 749 for (i = 0; i < N; i++) { 750 if (in[i] < 0) { 751 out[i] = in[i]; 752 continue; 753 } 754 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 755 out[i] = idx[in[i] / bs] * bs + (in[i] % bs); 756 } 757 } 758 PetscFunctionReturn(0); 759 } 760 761 /*@ 762 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 763 764 Not collective 765 766 Input Parameters: 767 + mapping - the local to global mapping context 768 . N - number of integers 769 - in - input indices in local block numbering 770 771 Output Parameter: 772 . out - indices in global block numbering 773 774 Notes: 775 The in and out array parameters may be identical. 
776 777 Example: 778 If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0 779 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 780 781 Level: advanced 782 783 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 784 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 785 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 786 787 @*/ 788 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) { 789 PetscInt i, Nmax; 790 const PetscInt *idx; 791 792 PetscFunctionBegin; 793 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 794 Nmax = mapping->n; 795 idx = mapping->indices; 796 for (i = 0; i < N; i++) { 797 if (in[i] < 0) { 798 out[i] = in[i]; 799 continue; 800 } 801 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 802 out[i] = idx[in[i]]; 803 } 804 PetscFunctionReturn(0); 805 } 806 807 /*@ 808 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 809 specified with a global numbering. 810 811 Not collective 812 813 Input Parameters: 814 + mapping - mapping between local and global numbering 815 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 816 IS_GTOLM_DROP - drops the indices with no local value from the output list 817 . n - number of global indices to map 818 - idx - global indices to map 819 820 Output Parameters: 821 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 822 - idxout - local index of each global index, one must pass in an array long enough 823 to hold all the indices. 
You can call ISGlobalToLocalMappingApply() with 824 idxout == NULL to determine the required length (returned in nout) 825 and then allocate the required space and call ISGlobalToLocalMappingApply() 826 a second time to set the values. 827 828 Notes: 829 Either nout or idxout may be NULL. idx and idxout may be identical. 830 831 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 832 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 833 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 834 835 Level: advanced 836 837 Developer Note: The manual page states that idx and idxout may be identical but the calling 838 sequence declares idx as const so it cannot be the same as idxout. 839 840 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, 841 `ISLocalToGlobalMappingDestroy()` 842 @*/ 843 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[]) { 844 PetscFunctionBegin; 845 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 846 if (!mapping->data) { PetscCall(ISGlobalToLocalMappingSetUp(mapping)); } 847 PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout); 848 PetscFunctionReturn(0); 849 } 850 851 /*@ 852 ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering 853 a new index set using the local numbering defined in an ISLocalToGlobalMapping 854 context. 855 856 Not collective 857 858 Input Parameters: 859 + mapping - mapping between local and global numbering 860 . 
type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 861 IS_GTOLM_DROP - drops the indices with no local value from the output list 862 - is - index set in global numbering 863 864 Output Parameters: 865 . newis - index set in local numbering 866 867 Notes: 868 The output IS will be sequential, as it encodes a purely local operation 869 870 Level: advanced 871 872 .seealso: `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 873 `ISLocalToGlobalMappingDestroy()` 874 @*/ 875 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis) { 876 PetscInt n, nout, *idxout; 877 const PetscInt *idxin; 878 879 PetscFunctionBegin; 880 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 881 PetscValidHeaderSpecific(is, IS_CLASSID, 3); 882 PetscValidPointer(newis, 4); 883 884 PetscCall(ISGetLocalSize(is, &n)); 885 PetscCall(ISGetIndices(is, &idxin)); 886 if (type == IS_GTOLM_MASK) { 887 PetscCall(PetscMalloc1(n, &idxout)); 888 } else { 889 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL)); 890 PetscCall(PetscMalloc1(nout, &idxout)); 891 } 892 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout)); 893 PetscCall(ISRestoreIndices(is, &idxin)); 894 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis)); 895 PetscFunctionReturn(0); 896 } 897 898 /*@ 899 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 900 specified with a block global numbering. 901 902 Not collective 903 904 Input Parameters: 905 + mapping - mapping between local and global numbering 906 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 907 IS_GTOLM_DROP - drops the indices with no local value from the output list 908 . 
n - number of global indices to map 909 - idx - global indices to map 910 911 Output Parameters: 912 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 913 - idxout - local index of each global index, one must pass in an array long enough 914 to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with 915 idxout == NULL to determine the required length (returned in nout) 916 and then allocate the required space and call ISGlobalToLocalMappingApplyBlock() 917 a second time to set the values. 918 919 Notes: 920 Either nout or idxout may be NULL. idx and idxout may be identical. 921 922 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 923 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 924 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 925 926 Level: advanced 927 928 Developer Note: The manual page states that idx and idxout may be identical but the calling 929 sequence declares idx as const so it cannot be the same as idxout. 
930 931 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 932 `ISLocalToGlobalMappingDestroy()` 933 @*/ 934 PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[]) { 935 PetscFunctionBegin; 936 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 937 if (!mapping->data) { PetscCall(ISGlobalToLocalMappingSetUp(mapping)); } 938 PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout); 939 PetscFunctionReturn(0); 940 } 941 942 /*@C 943 ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and 944 each index shared by more than one processor 945 946 Collective on ISLocalToGlobalMapping 947 948 Input Parameter: 949 . mapping - the mapping from local to global indexing 950 951 Output Parameters: 952 + nproc - number of processors that are connected to this one 953 . proc - neighboring processors 954 . numproc - number of indices for each subdomain (processor) 955 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 956 957 Level: advanced 958 959 Fortran Usage: 960 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 961 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 962 PetscInt indices[nproc][numprocmax],ierr) 963 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 964 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_cached) {
    /* hand back the cached arrays; ownership stays with the mapping */
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    /* first call: compute the neighbor info (collective) and cache it */
    PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices));
  }
  PetscFunctionReturn(0);
}

/*
   Computes, via three rounds of point-to-point MPI traffic, which ranks share
   which of this rank's local (block) indices.  Results are returned through the
   output arrays and also cached on the mapping (info_cached).
   NOTE(review): the parallel rendezvous below reuses several buffers (nprocs,
   starts) for different purposes in sequence; statement order is load-bearing.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) {
  PetscMPIInt  size, rank, tag1, tag2, tag3, *len, *source, imdex;
  PetscInt     i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
  PetscInt    *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
  PetscInt     cnt, scale, *ownedsenders, *nownedsenders, rstart;
  PetscInt     node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
  PetscInt     first_procs, first_numprocs, *first_indices;
  MPI_Request *recv_waits, *send_waits;
  MPI_Status   recv_status, *send_status, *recv_statuses;
  MPI_Comm     comm;
  PetscBool    debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) {
    /* serial: no neighbors; return empty (but allocated) numprocs/indices */
    *nproc = 0;
    *procs = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag3));

  /* global index range is [0, Ng); each rank "globally owns" a slab of size scale */
  for (i = 0; i < n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  scale = Ng / size + 1;
  ng    = scale;
  if (rank == size - 1) ng = Ng - scale * (size - 1);
  ng     = PetscMax(1, ng);
  rstart = scale * rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2 * size, &nprocs));
  PetscCall(PetscArrayzero(nprocs, 2 * size));

  /* determine owners of each local node */
  PetscCall(PetscMalloc1(n, &owner));
  for (i = 0; i < n; i++) {
    proc                 = lindices[i] / scale; /* processor that globally owns this index */
    nprocs[2 * proc + 1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]             = proc;
    nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0;
  for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
  PetscCall(PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm, nprocs, &nmax, &nrecvs));
  PetscCall(PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs));
  PetscCall(PetscMalloc1(nrecvs + 1, &recv_waits));
  for (i = 0; i < nrecvs; i++) { PetscCallMPI(MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i)); }

  /* pack messages containing lists of local nodes to owners; entries come in
     (global index, local position) pairs */
  PetscCall(PetscMalloc1(2 * n + 1, &sends));
  PetscCall(PetscMalloc1(size + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
  for (i = 0; i < n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* packing advanced starts[]; rebuild it before sending */
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends + 1, &send_waits));
  PetscCall(PetscMalloc1(nsends + 1, &dest));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[2 * i]) {
      PetscCallMPI(MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives; message lengths are discovered via MPI_Get_count */
  PetscCall(PetscMalloc1(nrecvs + 1, &source));
  PetscCall(PetscMalloc1(nrecvs + 1, &len));
  cnt = nrecvs;
  PetscCall(PetscCalloc1(ng + 1, &nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex] / 2; /* count in (index,position) pairs */
    /* count how many local owners for each of my global owned indices */
    for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i = 0; i < ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm + 1, &ownedsenders));
  PetscCall(PetscMalloc1(ng + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i = 0; i < nrecvs; i++) {
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced by the fill loop above; rebuild for printing */
    starts[0] = 0;
    for (i = 1; i < ng; i++) {
      if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
      else starts[i] = starts[i - 1];
    }
    for (i = 0; i < ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart));
        for (j = 0; j < nownedsenders[i]; j++) { PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j])); }
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends, &send_status));
    PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners (starts[] rebuilt once more) */
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2 + 1, &nprocs)); /* length of each message */
  for (i = 0; i < nrecvs; i++) {
    nprocs[i] = 1;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i = 0; i < nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt + 1, &sends2));
  PetscCall(PetscMalloc1(nsends2 + 1, &starts2));

  starts2[0] = 0;
  for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i = 0; i < nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
        sends2[starts2[i] + cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2 + 1, &lens2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &starts3));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) { PetscCallMPI(MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i)); }

  /* send the message lengths */
  for (i = 0; i < nsends2; i++) { PetscCallMPI(MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm)); }

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i = 0; i < nrecvs2 - 1; i++) {
    starts3[i + 1] = starts3[i] + lens2[i];
    nt += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2 - 1];

  PetscCall(PetscMalloc1(nt + 1, &recvs2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) { PetscCallMPI(MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i)); }

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2 + 1, &send_waits));
  for (i = 0; i < nsends2; i++) { PetscCallMPI(MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i)); }

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i = 0; i < nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j = 0; j < nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]));
        for (k = 0; k < recvs2[cnt + 1]; k++) { PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k])); }
        cnt += 2 + recvs2[cnt + 1];
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node; nprocs is reused here as a
     per-rank counter array */
  PetscCall(PetscCalloc1(size, &nprocs));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  nt = 0;
  for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscCall(PetscMalloc1(nt + 1, procs));
  PetscCall(PetscMalloc1(nt + 1, numprocs));
  PetscCall(PetscMalloc1(nt + 1, indices));
  for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
  PetscCall(PetscMalloc1(size, &bprocs)); /* rank -> position in the compressed neighbor list */
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i], &(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs, nt));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i = 0; i < nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i], &tmp));
    for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i = 0; i < nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]));
      for (j = 0; j < (*numprocs)[i]; j++) { PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j])); }
      PetscCall(PetscSynchronizedPrintf(comm, "\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2, &send_status));
    PetscCallMPI(MPI_Waitall(nsends2, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i = 0; i < *nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_free) {
    /* info_free is set by ISLocalToGlobalMappingGetInfo() when bs > 1: numprocs
       and indices were allocated there; procs still points at the cached array
       owned by the mapping, so it is deliberately NOT freed here */
    PetscCall(PetscFree(*numprocs));
    if (*indices) {
      PetscInt i;

      PetscCall(PetscFree((*indices)[0]));
      for (i = 1; i < *nproc; i++) { PetscCall(PetscFree((*indices)[i])); }
      PetscCall(PetscFree(*indices));
    }
  }
  /* when info is cached (info_free false) nothing is freed; just null the caller's handles */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.
mapping - the mapping from local to global indexing 1423 1424 Output Parameters: 1425 + nproc - number of processors that are connected to this one 1426 . proc - neighboring processors 1427 . numproc - number of indices for each subdomain (processor) 1428 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 1429 1430 Level: advanced 1431 1432 Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed. 1433 1434 Fortran Usage: 1435 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 1436 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 1437 PetscInt indices[nproc][numprocmax],ierr) 1438 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 1439 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 
1440 1441 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`, 1442 `ISLocalToGlobalMappingRestoreInfo()` 1443 @*/ 1444 PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) { 1445 PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k; 1446 1447 PetscFunctionBegin; 1448 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 1449 bs = mapping->bs; 1450 PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices)); 1451 if (bs > 1) { /* we need to expand the cached info */ 1452 PetscCall(PetscCalloc1(*nproc, &*indices)); 1453 PetscCall(PetscCalloc1(*nproc, &*numprocs)); 1454 for (i = 0; i < *nproc; i++) { 1455 PetscCall(PetscMalloc1(bs * bnumprocs[i], &(*indices)[i])); 1456 for (j = 0; j < bnumprocs[i]; j++) { 1457 for (k = 0; k < bs; k++) { (*indices)[i][j * bs + k] = bs * bindices[i][j] + k; } 1458 } 1459 (*numprocs)[i] = bnumprocs[i] * bs; 1460 } 1461 mapping->info_free = PETSC_TRUE; 1462 } else { 1463 *numprocs = bnumprocs; 1464 *indices = bindices; 1465 } 1466 PetscFunctionReturn(0); 1467 } 1468 1469 /*@C 1470 ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo() 1471 1472 Collective on ISLocalToGlobalMapping 1473 1474 Input Parameter: 1475 . mapping - the mapping from local to global indexing 1476 1477 Output Parameters: 1478 + nproc - number of processors that are connected to this one 1479 . proc - neighboring processors 1480 . 
numproc - number of indices for each processor 1481 - indices - indices of local nodes shared with neighbor (sorted by global numbering) 1482 1483 Level: advanced 1484 1485 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`, 1486 `ISLocalToGlobalMappingGetInfo()` 1487 @*/ 1488 PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) { 1489 PetscFunctionBegin; 1490 PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices)); 1491 PetscFunctionReturn(0); 1492 } 1493 1494 /*@C 1495 ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node 1496 1497 Collective on ISLocalToGlobalMapping 1498 1499 Input Parameter: 1500 . mapping - the mapping from local to global indexing 1501 1502 Output Parameters: 1503 + nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize()) 1504 . count - number of neighboring processors per node 1505 - indices - indices of processes sharing the node (sorted) 1506 1507 Level: advanced 1508 1509 Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed. 

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[]) {
  PetscInt n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
  if (!mapping->info_nodec) {
    /* build and cache a CSR-like per-node neighbor table (info_nodec = counts,
       info_nodei = row pointers into one contiguous allocation) */
    PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;

    PetscCall(PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
    /* first pass: every node has at least itself (slot for neigh[0] == own rank) */
    for (i = 0; i < n; i++) { mapping->info_nodec[i] = 1; }
    m                      = n;
    mapping->info_nodec[n] = 0;
    /* entry 0 of the neighbor lists is this process itself, so start at 1 */
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      m += n_shared[i];
      for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* one contiguous buffer holds all rows; rows are offsets into row 0 */
    if (n) PetscCall(PetscMalloc1(m, &mapping->info_nodei[0]));
    for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
    /* second pass: reset counts and fill the rows */
    PetscCall(PetscArrayzero(mapping->info_nodec, n));
    for (i = 0; i < n; i++) {
      mapping->info_nodec[i]    = 1;
      /* NOTE(review): assumes n_neigh >= 1 whenever n > 0 (neigh[0] is read
         unconditionally); confirm GetInfo() guarantees this on all ranks */
      mapping->info_nodei[i][0] = neigh[0];
    }
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      for (j = 0; j < n_shared[i]; j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each row and drop duplicate ranks; updates the counts in place */
    for (i = 0; i < n; i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
  }
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the node info stays cached on the mapping; only null the caller's handles */
  if (nnodes) *nnodes = 0;
  if (count) *count = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Parameter:
.  ltog - local to global mapping

   Output Parameter:
.
array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize() 1597 1598 Level: advanced 1599 1600 Notes: 1601 ISLocalToGlobalMappingGetSize() returns the length the this array 1602 1603 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()` 1604 @*/ 1605 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) { 1606 PetscFunctionBegin; 1607 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1608 PetscValidPointer(array, 2); 1609 if (ltog->bs == 1) { 1610 *array = ltog->indices; 1611 } else { 1612 PetscInt *jj, k, i, j, n = ltog->n, bs = ltog->bs; 1613 const PetscInt *ii; 1614 1615 PetscCall(PetscMalloc1(bs * n, &jj)); 1616 *array = jj; 1617 k = 0; 1618 ii = ltog->indices; 1619 for (i = 0; i < n; i++) 1620 for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j; 1621 } 1622 PetscFunctionReturn(0); 1623 } 1624 1625 /*@C 1626 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices() 1627 1628 Not Collective 1629 1630 Input Parameters: 1631 + ltog - local to global mapping 1632 - array - array of indices 1633 1634 Level: advanced 1635 1636 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` 1637 @*/ 1638 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) { 1639 PetscFunctionBegin; 1640 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1641 PetscValidPointer(array, 2); 1642 PetscCheck(ltog->bs != 1 || *array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer"); 1643 1644 if (ltog->bs > 1) { PetscCall(PetscFree(*(void **)array)); } 1645 PetscFunctionReturn(0); 1646 } 1647 1648 /*@C 1649 ISLocalToGlobalMappingGetBlockIndices - Get 
global indices for every local block 1650 1651 Not Collective 1652 1653 Input Parameter: 1654 . ltog - local to global mapping 1655 1656 Output Parameter: 1657 . array - array of indices 1658 1659 Level: advanced 1660 1661 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()` 1662 @*/ 1663 PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) { 1664 PetscFunctionBegin; 1665 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1666 PetscValidPointer(array, 2); 1667 *array = ltog->indices; 1668 PetscFunctionReturn(0); 1669 } 1670 1671 /*@C 1672 ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices() 1673 1674 Not Collective 1675 1676 Input Parameters: 1677 + ltog - local to global mapping 1678 - array - array of indices 1679 1680 Level: advanced 1681 1682 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` 1683 @*/ 1684 PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) { 1685 PetscFunctionBegin; 1686 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1687 PetscValidPointer(array, 2); 1688 PetscCheck(*array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer"); 1689 *array = NULL; 1690 PetscFunctionReturn(0); 1691 } 1692 1693 /*@C 1694 ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings 1695 1696 Not Collective 1697 1698 Input Parameters: 1699 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate 1700 . n - number of mappings to concatenate 1701 - ltogs - local to global mappings 1702 1703 Output Parameter: 1704 . 
ltogcat - new mapping 1705 1706 Note: this currently always returns a mapping with block size of 1 1707 1708 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1709 1710 Level: advanced 1711 1712 .seealso: `ISLocalToGlobalMappingCreate()` 1713 @*/ 1714 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat) { 1715 PetscInt i, cnt, m, *idx; 1716 1717 PetscFunctionBegin; 1718 PetscCheck(n >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "Must have a non-negative number of mappings, given %" PetscInt_FMT, n); 1719 if (n > 0) PetscValidPointer(ltogs, 3); 1720 for (i = 0; i < n; i++) PetscValidHeaderSpecific(ltogs[i], IS_LTOGM_CLASSID, 3); 1721 PetscValidPointer(ltogcat, 4); 1722 for (cnt = 0, i = 0; i < n; i++) { 1723 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1724 cnt += m; 1725 } 1726 PetscCall(PetscMalloc1(cnt, &idx)); 1727 for (cnt = 0, i = 0; i < n; i++) { 1728 const PetscInt *subidx; 1729 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1730 PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx)); 1731 PetscCall(PetscArraycpy(&idx[cnt], subidx, m)); 1732 PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx)); 1733 cnt += m; 1734 } 1735 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat)); 1736 PetscFunctionReturn(0); 1737 } 1738 1739 /*MC 1740 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1741 used this is good for only small and moderate size problems. 1742 1743 Options Database Keys: 1744 . 
-islocaltoglobalmapping_type basic - select this method

   Level: beginner

.seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog) {
  PetscFunctionBegin;
  /* Install the basic (dense table) global-to-local kernels; called from
     ISLocalToGlobalMappingSetType() after the previous type's context was destroyed */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}

/*MC
     ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
                                  used this is good for large memory problems.

   Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method

   Notes:
    This is selected automatically for large problems if the user does not set the type.

   Level: beginner

.seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC`
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog) {
  PetscFunctionBegin;
  /* Install the hash-table global-to-local kernels (memory proportional to the
     number of local indices rather than the global range) */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

   Not Collective

   Input Parameters:
+  sname - name of a new method
-  function - routine to create method context

   Notes:
   ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

   Sample usage:
.vb
   ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
.ve

   Then, your mapping can be chosen with the procedural interface via
$     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
   or at runtime via the option
$     -islocaltoglobalmapping_type my_mapper

   Level: advanced

.seealso: `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`

@*/
PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping)) {
  PetscFunctionBegin;
  /* ensure the IS package (and its function list) exists before adding to it */
  PetscCall(ISInitializePackage());
  PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function));
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.

   Logically Collective on ISLocalToGlobalMapping

   Input Parameters:
+  ltog - the ISLocalToGlobalMapping object
-  type - a known method

   Options Database Key:
.  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
    of available methods (for instance, basic or hash)

   Notes:
   See "petsc/include/petscis.h" for available methods

  Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
  then set the ISLocalToGlobalMapping type from the options database rather than by using
  this routine.

  Level: intermediate

  Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
  are accessed by ISLocalToGlobalMappingSetType().

.seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()`
@*/
PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) {
  PetscBool match;
  PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  if (type) PetscValidCharPointer(type, 2);

  /* no-op if the object already has the requested type */
  PetscCall(PetscObjectTypeCompare((PetscObject)ltog, type, &match));
  if (match) PetscFunctionReturn(0);

  /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
  if (type) {
    PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r));
    PetscCheck(r, PetscObjectComm((PetscObject)ltog), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unable to find requested ISLocalToGlobalMapping type %s", type);
  }
  /* Destroy the previous private LTOG context */
  PetscTryTypeMethod(ltog, destroy);
  /* NOTE(review): only ops->destroy is reset here; the globaltolocal ops are left in
     place, presumably because every registered create routine overwrites all of them
     (as Create_Basic/Create_Hash do above) - confirm for user-registered types */
  ltog->ops->destroy = NULL;

  /* record the new type name, then let the type's create routine install its ops */
  PetscCall(PetscObjectChangeTypeName((PetscObject)ltog, type));
  if (r) PetscCall((*r)(ltog));
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetType - Get the type of the l2g map

   Not Collective

   Input Parameter:
.  ltog - the ISLocalToGlobalMapping object

   Output Parameter:
.  type - the type

   Level: intermediate

.seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* the returned string is owned by the object; callers must not free it */
  *type = ((PetscObject)ltog)->type_name;
  PetscFunctionReturn(0);
}

/* guards ISLocalToGlobalMappingRegisterAll() so the built-in types are registered only once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

/*@C
  ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

  Not Collective

  Level: advanced

.seealso: `ISRegister()`, `ISLocalToGlobalMappingRegister()`
@*/
PetscErrorCode ISLocalToGlobalMappingRegisterAll(void) {
  PetscFunctionBegin;
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
  PetscFunctionReturn(0);
}