1 2 #include <petsc/private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petsc/private/hashmapi.h> 4 #include <petscsf.h> 5 #include <petscviewer.h> 6 7 PetscClassId IS_LTOGM_CLASSID; 8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***); 9 10 typedef struct { 11 PetscInt *globals; 12 } ISLocalToGlobalMapping_Basic; 13 14 typedef struct { 15 PetscHMapI globalht; 16 } ISLocalToGlobalMapping_Hash; 17 18 /*@C 19 ISGetPointRange - Returns a description of the points in an IS suitable for traversal 20 21 Not collective 22 23 Input Parameter: 24 . pointIS - The IS object 25 26 Output Parameters: 27 + pStart - The first index, see notes 28 . pEnd - One past the last index, see notes 29 - points - The indices, see notes 30 31 Notes: 32 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 33 $ ISGetPointRange(is, &pStart, &pEnd, &points); 34 $ for (p = pStart; p < pEnd; ++p) { 35 $ const PetscInt point = points ? 
points[p] : p; 36 $ } 37 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 38 39 Level: intermediate 40 41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride() 42 @*/ 43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 44 { 45 PetscInt numCells, step = 1; 46 PetscBool isStride; 47 PetscErrorCode ierr; 48 49 PetscFunctionBeginHot; 50 *pStart = 0; 51 *points = NULL; 52 ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr); 53 ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr); 54 if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);} 55 *pEnd = *pStart + numCells; 56 if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);} 57 PetscFunctionReturn(0); 58 } 59 60 /*@C 61 ISRestorePointRange - Destroys the traversal description 62 63 Not collective 64 65 Input Parameters: 66 + pointIS - The IS object 67 . pStart - The first index, from ISGetPointRange() 68 . pEnd - One past the last index, from ISGetPointRange() 69 - points - The indices, from ISGetPointRange() 70 71 Notes: 72 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 73 $ ISGetPointRange(is, &pStart, &pEnd, &points); 74 $ for (p = pStart; p < pEnd; ++p) { 75 $ const PetscInt point = points ? 
points[p] : p; 76 $ } 77 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 78 79 Level: intermediate 80 81 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride() 82 @*/ 83 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 84 { 85 PetscInt step = 1; 86 PetscBool isStride; 87 PetscErrorCode ierr; 88 89 PetscFunctionBeginHot; 90 ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr); 91 if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);} 92 if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);} 93 PetscFunctionReturn(0); 94 } 95 96 /*@C 97 ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given 98 99 Not collective 100 101 Input Parameters: 102 + subpointIS - The IS object to be configured 103 . pStar t - The first index of the subrange 104 . pEnd - One past the last index for the subrange 105 - points - The indices for the entire range, from ISGetPointRange() 106 107 Output Parameters: 108 . subpointIS - The IS object now configured to be a subrange 109 110 Notes: 111 The input IS will now respond properly to calls to ISGetPointRange() and return the subrange. 
112 113 Level: intermediate 114 115 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride() 116 @*/ 117 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points) 118 { 119 PetscErrorCode ierr; 120 121 PetscFunctionBeginHot; 122 if (points) { 123 ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr); 124 ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr); 125 } else { 126 ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr); 127 ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr); 128 } 129 PetscFunctionReturn(0); 130 } 131 132 /* -----------------------------------------------------------------------------------------*/ 133 134 /* 135 Creates the global mapping information in the ISLocalToGlobalMapping structure 136 137 If the user has not selected how to handle the global to local mapping then use HASH for "large" problems 138 */ 139 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping) 140 { 141 PetscInt i,*idx = mapping->indices,n = mapping->n,end,start; 142 PetscErrorCode ierr; 143 144 PetscFunctionBegin; 145 if (mapping->data) PetscFunctionReturn(0); 146 end = 0; 147 start = PETSC_MAX_INT; 148 149 for (i=0; i<n; i++) { 150 if (idx[i] < 0) continue; 151 if (idx[i] < start) start = idx[i]; 152 if (idx[i] > end) end = idx[i]; 153 } 154 if (start > end) {start = 0; end = -1;} 155 mapping->globalstart = start; 156 mapping->globalend = end; 157 if (!((PetscObject)mapping)->type_name) { 158 if ((end - start) > PetscMax(4*n,1000000)) { 159 ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr); 160 } else { 161 ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr); 162 } 163 } 164 ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr); 165 PetscFunctionReturn(0); 166 } 167 168 static PetscErrorCode 
ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping) 169 { 170 PetscErrorCode ierr; 171 PetscInt i,*idx = mapping->indices,n = mapping->n,end,start,*globals; 172 ISLocalToGlobalMapping_Basic *map; 173 174 PetscFunctionBegin; 175 start = mapping->globalstart; 176 end = mapping->globalend; 177 ierr = PetscNew(&map);CHKERRQ(ierr); 178 ierr = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr); 179 map->globals = globals; 180 for (i=0; i<end-start+1; i++) globals[i] = -1; 181 for (i=0; i<n; i++) { 182 if (idx[i] < 0) continue; 183 globals[idx[i] - start] = i; 184 } 185 mapping->data = (void*)map; 186 ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr); 187 PetscFunctionReturn(0); 188 } 189 190 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping) 191 { 192 PetscErrorCode ierr; 193 PetscInt i,*idx = mapping->indices,n = mapping->n; 194 ISLocalToGlobalMapping_Hash *map; 195 196 PetscFunctionBegin; 197 ierr = PetscNew(&map);CHKERRQ(ierr); 198 ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr); 199 for (i=0; i<n; i++) { 200 if (idx[i] < 0) continue; 201 ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr); 202 } 203 mapping->data = (void*)map; 204 ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr); 205 PetscFunctionReturn(0); 206 } 207 208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping) 209 { 210 PetscErrorCode ierr; 211 ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data; 212 213 PetscFunctionBegin; 214 if (!map) PetscFunctionReturn(0); 215 ierr = PetscFree(map->globals);CHKERRQ(ierr); 216 ierr = PetscFree(mapping->data);CHKERRQ(ierr); 217 PetscFunctionReturn(0); 218 } 219 220 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping) 221 { 222 PetscErrorCode ierr; 223 ISLocalToGlobalMapping_Hash *map = 
(ISLocalToGlobalMapping_Hash*)mapping->data; 224 225 PetscFunctionBegin; 226 if (!map) PetscFunctionReturn(0); 227 ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr); 228 ierr = PetscFree(mapping->data);CHKERRQ(ierr); 229 PetscFunctionReturn(0); 230 } 231 232 #define GTOLTYPE _Basic 233 #define GTOLNAME _Basic 234 #define GTOLBS mapping->bs 235 #define GTOL(g, local) do { \ 236 local = map->globals[g/bs - start]; \ 237 if (local >= 0) local = bs*local + (g % bs); \ 238 } while (0) 239 240 #include <../src/vec/is/utils/isltog.h> 241 242 #define GTOLTYPE _Basic 243 #define GTOLNAME Block_Basic 244 #define GTOLBS 1 245 #define GTOL(g, local) do { \ 246 local = map->globals[g - start]; \ 247 } while (0) 248 #include <../src/vec/is/utils/isltog.h> 249 250 #define GTOLTYPE _Hash 251 #define GTOLNAME _Hash 252 #define GTOLBS mapping->bs 253 #define GTOL(g, local) do { \ 254 (void)PetscHMapIGet(map->globalht,g/bs,&local); \ 255 if (local >= 0) local = bs*local + (g % bs); \ 256 } while (0) 257 #include <../src/vec/is/utils/isltog.h> 258 259 #define GTOLTYPE _Hash 260 #define GTOLNAME Block_Hash 261 #define GTOLBS 1 262 #define GTOL(g, local) do { \ 263 (void)PetscHMapIGet(map->globalht,g,&local); \ 264 } while (0) 265 #include <../src/vec/is/utils/isltog.h> 266 267 /*@ 268 ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object 269 270 Not Collective 271 272 Input Parameter: 273 . ltog - local to global mapping 274 275 Output Parameter: 276 . 
nltog - the duplicated local to global mapping 277 278 Level: advanced 279 280 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 281 @*/ 282 PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog) 283 { 284 PetscErrorCode ierr; 285 286 PetscFunctionBegin; 287 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 288 ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr); 289 PetscFunctionReturn(0); 290 } 291 292 /*@ 293 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 294 295 Not Collective 296 297 Input Parameter: 298 . ltog - local to global mapping 299 300 Output Parameter: 301 . n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length 302 303 Level: advanced 304 305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 306 @*/ 307 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n) 308 { 309 PetscFunctionBegin; 310 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 311 PetscValidIntPointer(n,2); 312 *n = mapping->bs*mapping->n; 313 PetscFunctionReturn(0); 314 } 315 316 /*@C 317 ISLocalToGlobalMappingViewFromOptions - View from Options 318 319 Collective on ISLocalToGlobalMapping 320 321 Input Parameters: 322 + A - the local to global mapping object 323 . 
obj - Optional object 324 - name - command line option 325 326 Level: intermediate 327 .seealso: ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate() 328 @*/ 329 PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[]) 330 { 331 PetscErrorCode ierr; 332 333 PetscFunctionBegin; 334 PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1); 335 ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr); 336 PetscFunctionReturn(0); 337 } 338 339 /*@C 340 ISLocalToGlobalMappingView - View a local to global mapping 341 342 Not Collective 343 344 Input Parameters: 345 + ltog - local to global mapping 346 - viewer - viewer 347 348 Level: advanced 349 350 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 351 @*/ 352 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer) 353 { 354 PetscInt i; 355 PetscMPIInt rank; 356 PetscBool iascii; 357 PetscErrorCode ierr; 358 359 PetscFunctionBegin; 360 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 361 if (!viewer) { 362 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr); 363 } 364 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 365 366 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRMPI(ierr); 367 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 368 if (iascii) { 369 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr); 370 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 371 for (i=0; i<mapping->n; i++) { 372 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr); 373 } 374 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 375 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 376 } 377 PetscFunctionReturn(0); 378 } 379 380 /*@ 
381 ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n) 382 ordering and a global parallel ordering. 383 384 Not collective 385 386 Input Parameter: 387 . is - index set containing the global numbers for each local number 388 389 Output Parameter: 390 . mapping - new mapping data structure 391 392 Notes: 393 the block size of the IS determines the block size of the mapping 394 Level: advanced 395 396 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions() 397 @*/ 398 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping) 399 { 400 PetscErrorCode ierr; 401 PetscInt n,bs; 402 const PetscInt *indices; 403 MPI_Comm comm; 404 PetscBool isblock; 405 406 PetscFunctionBegin; 407 PetscValidHeaderSpecific(is,IS_CLASSID,1); 408 PetscValidPointer(mapping,2); 409 410 ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr); 411 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 412 ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr); 413 if (!isblock) { 414 ierr = ISGetIndices(is,&indices);CHKERRQ(ierr); 415 ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 416 ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr); 417 } else { 418 ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr); 419 ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr); 420 ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 421 ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr); 422 } 423 PetscFunctionReturn(0); 424 } 425 426 /*@C 427 ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) 428 ordering and a global parallel ordering. 429 430 Collective 431 432 Input Parameter: 433 + sf - star forest mapping contiguous local indices to (rank, offset) 434 - start - first global index on this process 435 436 Output Parameter: 437 . 
mapping - new mapping data structure 438 439 Level: advanced 440 441 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions() 442 @*/ 443 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping) 444 { 445 PetscErrorCode ierr; 446 PetscInt i,maxlocal,nroots,nleaves,*globals,*ltog; 447 const PetscInt *ilocal; 448 MPI_Comm comm; 449 450 PetscFunctionBegin; 451 PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1); 452 PetscValidPointer(mapping,3); 453 454 ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr); 455 ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr); 456 if (ilocal) { 457 for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1); 458 } 459 else maxlocal = nleaves; 460 ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr); 461 ierr = PetscMalloc1(maxlocal,<og);CHKERRQ(ierr); 462 for (i=0; i<nroots; i++) globals[i] = start + i; 463 for (i=0; i<maxlocal; i++) ltog[i] = -1; 464 ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr); 465 ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr); 466 ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr); 467 ierr = PetscFree(globals);CHKERRQ(ierr); 468 PetscFunctionReturn(0); 469 } 470 471 /*@ 472 ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping 473 474 Not collective 475 476 Input Parameters: 477 + mapping - mapping data structure 478 - bs - the blocksize 479 480 Level: advanced 481 482 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 483 @*/ 484 PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs) 485 { 486 PetscInt *nid; 487 const PetscInt *oid; 488 PetscInt i,cn,on,obs,nn; 489 PetscErrorCode ierr; 490 491 PetscFunctionBegin; 492 
PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 493 if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs); 494 if (bs == mapping->bs) PetscFunctionReturn(0); 495 on = mapping->n; 496 obs = mapping->bs; 497 oid = mapping->indices; 498 nn = (on*obs)/bs; 499 if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on); 500 501 ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr); 502 ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr); 503 for (i=0;i<nn;i++) { 504 PetscInt j; 505 for (j=0,cn=0;j<bs-1;j++) { 506 if (oid[i*bs+j] < 0) { cn++; continue; } 507 if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]); 508 } 509 if (oid[i*bs+j] < 0) cn++; 510 if (cn) { 511 if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn); 512 nid[i] = -1; 513 } else { 514 nid[i] = oid[i*bs]/bs; 515 } 516 } 517 ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr); 518 519 mapping->n = nn; 520 mapping->bs = bs; 521 ierr = PetscFree(mapping->indices);CHKERRQ(ierr); 522 mapping->indices = nid; 523 mapping->globalstart = 0; 524 mapping->globalend = 0; 525 526 /* reset the cached information */ 527 ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr); 528 ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr); 529 if (mapping->info_indices) { 530 PetscInt i; 531 532 ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr); 533 for (i=1; i<mapping->info_nproc; i++) { 534 ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr); 535 } 536 ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr); 537 } 538 mapping->info_cached = PETSC_FALSE; 539 540 if 
(mapping->ops->destroy) { 541 ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr); 542 } 543 PetscFunctionReturn(0); 544 } 545 546 /*@ 547 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 548 ordering and a global parallel ordering. 549 550 Not Collective 551 552 Input Parameters: 553 . mapping - mapping data structure 554 555 Output Parameter: 556 . bs - the blocksize 557 558 Level: advanced 559 560 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 561 @*/ 562 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs) 563 { 564 PetscFunctionBegin; 565 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 566 *bs = mapping->bs; 567 PetscFunctionReturn(0); 568 } 569 570 /*@ 571 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 572 ordering and a global parallel ordering. 573 574 Not Collective, but communicator may have more than one process 575 576 Input Parameters: 577 + comm - MPI communicator 578 . bs - the block size 579 . n - the number of local elements divided by the block size, or equivalently the number of block indices 580 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 581 - mode - see PetscCopyMode 582 583 Output Parameter: 584 . mapping - new mapping data structure 585 586 Notes: 587 There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 588 589 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 590 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 
591 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 592 593 Level: advanced 594 595 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH 596 ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType 597 @*/ 598 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping) 599 { 600 PetscErrorCode ierr; 601 PetscInt *in; 602 603 PetscFunctionBegin; 604 if (n) PetscValidIntPointer(indices,3); 605 PetscValidPointer(mapping,4); 606 607 *mapping = NULL; 608 ierr = ISInitializePackage();CHKERRQ(ierr); 609 610 ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS", 611 comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr); 612 (*mapping)->n = n; 613 (*mapping)->bs = bs; 614 (*mapping)->info_cached = PETSC_FALSE; 615 (*mapping)->info_free = PETSC_FALSE; 616 (*mapping)->info_procs = NULL; 617 (*mapping)->info_numprocs = NULL; 618 (*mapping)->info_indices = NULL; 619 (*mapping)->info_nodec = NULL; 620 (*mapping)->info_nodei = NULL; 621 622 (*mapping)->ops->globaltolocalmappingapply = NULL; 623 (*mapping)->ops->globaltolocalmappingapplyblock = NULL; 624 (*mapping)->ops->destroy = NULL; 625 if (mode == PETSC_COPY_VALUES) { 626 ierr = PetscMalloc1(n,&in);CHKERRQ(ierr); 627 ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr); 628 (*mapping)->indices = in; 629 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 630 } else if (mode == PETSC_OWN_POINTER) { 631 (*mapping)->indices = (PetscInt*)indices; 632 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 633 } 634 else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently 
use PETSC_USE_POINTER"); 635 PetscFunctionReturn(0); 636 } 637 638 PetscFunctionList ISLocalToGlobalMappingList = NULL; 639 640 641 /*@ 642 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 643 644 Not collective 645 646 Input Parameters: 647 . mapping - mapping data structure 648 649 Level: advanced 650 651 @*/ 652 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) 653 { 654 PetscErrorCode ierr; 655 char type[256]; 656 ISLocalToGlobalMappingType defaulttype = "Not set"; 657 PetscBool flg; 658 659 PetscFunctionBegin; 660 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 661 ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr); 662 ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr); 663 ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr); 664 if (flg) { 665 ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr); 666 } 667 ierr = PetscOptionsEnd();CHKERRQ(ierr); 668 PetscFunctionReturn(0); 669 } 670 671 /*@ 672 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 673 ordering and a global parallel ordering. 674 675 Note Collective 676 677 Input Parameters: 678 . 
mapping - mapping data structure 679 680 Level: advanced 681 682 .seealso: ISLocalToGlobalMappingCreate() 683 @*/ 684 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) 685 { 686 PetscErrorCode ierr; 687 688 PetscFunctionBegin; 689 if (!*mapping) PetscFunctionReturn(0); 690 PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1); 691 if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);} 692 ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr); 693 ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr); 694 ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr); 695 if ((*mapping)->info_indices) { 696 PetscInt i; 697 698 ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr); 699 for (i=1; i<(*mapping)->info_nproc; i++) { 700 ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr); 701 } 702 ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr); 703 } 704 if ((*mapping)->info_nodei) { 705 ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr); 706 } 707 ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr); 708 if ((*mapping)->ops->destroy) { 709 ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr); 710 } 711 ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr); 712 *mapping = NULL; 713 PetscFunctionReturn(0); 714 } 715 716 /*@ 717 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 718 a new index set using the global numbering defined in an ISLocalToGlobalMapping 719 context. 720 721 Collective on is 722 723 Input Parameters: 724 + mapping - mapping between local and global numbering 725 - is - index set in local numbering 726 727 Output Parameters: 728 . newis - index set in global numbering 729 730 Notes: 731 The output IS will have the same communicator of the input IS. 
732 733 Level: advanced 734 735 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(), 736 ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply() 737 @*/ 738 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis) 739 { 740 PetscErrorCode ierr; 741 PetscInt n,*idxout; 742 const PetscInt *idxin; 743 744 PetscFunctionBegin; 745 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 746 PetscValidHeaderSpecific(is,IS_CLASSID,2); 747 PetscValidPointer(newis,3); 748 749 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 750 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 751 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 752 ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr); 753 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 754 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 755 PetscFunctionReturn(0); 756 } 757 758 /*@ 759 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 760 and converts them to the global numbering. 761 762 Not collective 763 764 Input Parameters: 765 + mapping - the local to global mapping context 766 . N - number of integers 767 - in - input indices in local numbering 768 769 Output Parameter: 770 . out - indices in global numbering 771 772 Notes: 773 The in and out array parameters may be identical. 
774 775 Level: advanced 776 777 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 778 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 779 AOPetscToApplication(), ISGlobalToLocalMappingApply() 780 781 @*/ 782 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 783 { 784 PetscInt i,bs,Nmax; 785 786 PetscFunctionBegin; 787 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 788 bs = mapping->bs; 789 Nmax = bs*mapping->n; 790 if (bs == 1) { 791 const PetscInt *idx = mapping->indices; 792 for (i=0; i<N; i++) { 793 if (in[i] < 0) { 794 out[i] = in[i]; 795 continue; 796 } 797 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 798 out[i] = idx[in[i]]; 799 } 800 } else { 801 const PetscInt *idx = mapping->indices; 802 for (i=0; i<N; i++) { 803 if (in[i] < 0) { 804 out[i] = in[i]; 805 continue; 806 } 807 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 808 out[i] = idx[in[i]/bs]*bs + (in[i] % bs); 809 } 810 } 811 PetscFunctionReturn(0); 812 } 813 814 /*@ 815 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 816 817 Not collective 818 819 Input Parameters: 820 + mapping - the local to global mapping context 821 . N - number of integers 822 - in - input indices in local block numbering 823 824 Output Parameter: 825 . out - indices in global block numbering 826 827 Notes: 828 The in and out array parameters may be identical. 829 830 Example: 831 If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0 832 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 
833 834 Level: advanced 835 836 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 837 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 838 AOPetscToApplication(), ISGlobalToLocalMappingApply() 839 840 @*/ 841 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 842 { 843 844 PetscFunctionBegin; 845 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 846 { 847 PetscInt i,Nmax = mapping->n; 848 const PetscInt *idx = mapping->indices; 849 850 for (i=0; i<N; i++) { 851 if (in[i] < 0) { 852 out[i] = in[i]; 853 continue; 854 } 855 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i); 856 out[i] = idx[in[i]]; 857 } 858 } 859 PetscFunctionReturn(0); 860 } 861 862 /*@ 863 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 864 specified with a global numbering. 865 866 Not collective 867 868 Input Parameters: 869 + mapping - mapping between local and global numbering 870 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 871 IS_GTOLM_DROP - drops the indices with no local value from the output list 872 . n - number of global indices to map 873 - idx - global indices to map 874 875 Output Parameters: 876 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 877 - idxout - local index of each global index, one must pass in an array long enough 878 to hold all the indices. You can call ISGlobalToLocalMappingApply() with 879 idxout == NULL to determine the required length (returned in nout) 880 and then allocate the required space and call ISGlobalToLocalMappingApply() 881 a second time to set the values. 882 883 Notes: 884 Either nout or idxout may be NULL. idx and idxout may be identical. 
885 886 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 887 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 888 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 889 890 Level: advanced 891 892 Developer Note: The manual page states that idx and idxout may be identical but the calling 893 sequence declares idx as const so it cannot be the same as idxout. 894 895 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(), 896 ISLocalToGlobalMappingDestroy() 897 @*/ 898 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 899 { 900 PetscErrorCode ierr; 901 902 PetscFunctionBegin; 903 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 904 if (!mapping->data) { 905 ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr); 906 } 907 ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr); 908 PetscFunctionReturn(0); 909 } 910 911 /*@ 912 ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering 913 a new index set using the local numbering defined in an ISLocalToGlobalMapping 914 context. 915 916 Not collective 917 918 Input Parameters: 919 + mapping - mapping between local and global numbering 920 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 921 IS_GTOLM_DROP - drops the indices with no local value from the output list 922 - is - index set in global numbering 923 924 Output Parameters: 925 . 
newis - index set in local numbering 926 927 Notes: 928 The output IS will be sequential, as it encodes a purely local operation 929 930 Level: advanced 931 932 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 933 ISLocalToGlobalMappingDestroy() 934 @*/ 935 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis) 936 { 937 PetscErrorCode ierr; 938 PetscInt n,nout,*idxout; 939 const PetscInt *idxin; 940 941 PetscFunctionBegin; 942 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 943 PetscValidHeaderSpecific(is,IS_CLASSID,3); 944 PetscValidPointer(newis,4); 945 946 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 947 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 948 if (type == IS_GTOLM_MASK) { 949 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 950 } else { 951 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr); 952 ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr); 953 } 954 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr); 955 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 956 ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 957 PetscFunctionReturn(0); 958 } 959 960 /*@ 961 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 962 specified with a block global numbering. 963 964 Not collective 965 966 Input Parameters: 967 + mapping - mapping between local and global numbering 968 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 969 IS_GTOLM_DROP - drops the indices with no local value from the output list 970 . 
n - number of global indices to map 971 - idx - global indices to map 972 973 Output Parameters: 974 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 975 - idxout - local index of each global index, one must pass in an array long enough 976 to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with 977 idxout == NULL to determine the required length (returned in nout) 978 and then allocate the required space and call ISGlobalToLocalMappingApplyBlock() 979 a second time to set the values. 980 981 Notes: 982 Either nout or idxout may be NULL. idx and idxout may be identical. 983 984 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 985 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 986 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 987 988 Level: advanced 989 990 Developer Note: The manual page states that idx and idxout may be identical but the calling 991 sequence declares idx as const so it cannot be the same as idxout. 
992 993 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 994 ISLocalToGlobalMappingDestroy() 995 @*/ 996 PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, 997 PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 998 { 999 PetscErrorCode ierr; 1000 1001 PetscFunctionBegin; 1002 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 1003 if (!mapping->data) { 1004 ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr); 1005 } 1006 ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr); 1007 PetscFunctionReturn(0); 1008 } 1009 1010 1011 /*@C 1012 ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and 1013 each index shared by more than one processor 1014 1015 Collective on ISLocalToGlobalMapping 1016 1017 Input Parameters: 1018 . mapping - the mapping from local to global indexing 1019 1020 Output Parameter: 1021 + nproc - number of processors that are connected to this one 1022 . proc - neighboring processors 1023 . numproc - number of indices for each subdomain (processor) 1024 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 1025 1026 Level: advanced 1027 1028 Fortran Usage: 1029 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 1030 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 1031 PetscInt indices[nproc][numprocmax],ierr) 1032 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 1033 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_cached) {
    /* a previous call already computed (and stashed) the neighbor information; hand back the cached arrays */
    *nproc = mapping->info_nproc;
    *procs = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices = mapping->info_indices;
  } else {
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   Computes, via an MPI rendezvous on a temporary block-cyclic-like partition of the global
   index range, which ranks share each local index.  Results are cached on the mapping
   (info_* fields) so subsequent ISLocalToGlobalMappingGetBlockInfo() calls are free.
   The entry for this rank itself is swapped into position 0 of the output lists at the end.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (size == 1) {
    /* uniprocessor case: no neighbors; allocate the one-entry arrays the Restore routine expects */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = global upper bound (max index + 1) over all ranks */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* partition [0,Ng) into contiguous chunks of length 'scale'; this rank "globally owns" [rstart,rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows; each message holds (global index, sender-local index) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* recompute the offsets clobbered by the packing pass above */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRMPI(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRMPI(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill above advanced starts[]; rebuild it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRMPI(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRMPI(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);

  *nproc = nt;
  ierr   = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr   = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr   = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs maps a rank to its slot in the compressed output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.
numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_free) {
    /* info_free is set by ISLocalToGlobalMappingGetInfo() when it expanded the cached
       block data for bs > 1; those expanded copies are owned by the caller and freed here */
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt p;

      /* the first per-rank list is freed even when *nproc == 0 (see the size-1 path of GetBlockInfo) */
      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (p=1; p<*nproc; p++) { ierr = PetscFree((*indices)[p]);CHKERRQ(ierr); }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* always invalidate the caller's handles; cached arrays remain owned by the mapping */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
          each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1526 1527 Fortran Usage: 1528 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 1529 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 1530 PetscInt indices[nproc][numprocmax],ierr) 1531 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 1532 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 1533 1534 1535 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(), 1536 ISLocalToGlobalMappingRestoreInfo() 1537 @*/ 1538 PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[]) 1539 { 1540 PetscErrorCode ierr; 1541 PetscInt **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k; 1542 1543 PetscFunctionBegin; 1544 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 1545 ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr); 1546 if (bs > 1) { /* we need to expand the cached info */ 1547 ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr); 1548 ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr); 1549 for (i=0; i<*nproc; i++) { 1550 ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr); 1551 for (j=0; j<bnumprocs[i]; j++) { 1552 for (k=0; k<bs; k++) { 1553 (*indices)[i][j*bs+k] = bs*bindices[i][j] + k; 1554 } 1555 } 1556 (*numprocs)[i] = bnumprocs[i]*bs; 1557 } 1558 mapping->info_free = PETSC_TRUE; 1559 } else { 1560 *numprocs = bnumprocs; 1561 *indices = bindices; 1562 } 1563 PetscFunctionReturn(0); 1564 } 1565 1566 /*@C 1567 ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo() 1568 1569 Collective on ISLocalToGlobalMapping 1570 1571 Input Parameters: 1572 . 
mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the block variant knows (via mapping->info_free) whether these arrays were
     expanded copies that must be freed or cached data owned by the mapping */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) {
    /* build and cache the per-node neighbor lists (CSR-like layout: info_nodec counts, info_nodei pointers) */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: every node lists at least the local rank, plus one slot per
       occurrence in a neighbor's shared list (neighbor 0 is this rank itself) */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m    = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all lists; info_nodei[i] points into it at the prefix-sum offsets */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reset the counts and fill the lists */
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    /* NOTE(review): neigh[0] is taken as the self rank (GetBlockInfo puts this rank first);
       assumes n_neigh > 0 whenever n > 0 -- confirm for degenerate mappings */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates (a rank may list a node more than once) */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* all three outputs are optional; the returned arrays stay owned by the mapping */
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* the node info remains cached on the mapping; only the caller's handles are cleared */
  if (nnodes) *nnodes = 0;
  if (count) *count = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
.
array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize() 1697 1698 Level: advanced 1699 1700 Notes: 1701 ISLocalToGlobalMappingGetSize() returns the length the this array 1702 1703 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices() 1704 @*/ 1705 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1706 { 1707 PetscFunctionBegin; 1708 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1709 PetscValidPointer(array,2); 1710 if (ltog->bs == 1) { 1711 *array = ltog->indices; 1712 } else { 1713 PetscInt *jj,k,i,j,n = ltog->n, bs = ltog->bs; 1714 const PetscInt *ii; 1715 PetscErrorCode ierr; 1716 1717 ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr); 1718 *array = jj; 1719 k = 0; 1720 ii = ltog->indices; 1721 for (i=0; i<n; i++) 1722 for (j=0; j<bs; j++) 1723 jj[k++] = bs*ii[i] + j; 1724 } 1725 PetscFunctionReturn(0); 1726 } 1727 1728 /*@C 1729 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices() 1730 1731 Not Collective 1732 1733 Input Arguments: 1734 + ltog - local to global mapping 1735 - array - array of indices 1736 1737 Level: advanced 1738 1739 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices() 1740 @*/ 1741 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1742 { 1743 PetscFunctionBegin; 1744 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1745 PetscValidPointer(array,2); 1746 if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer"); 1747 1748 if (ltog->bs > 1) { 1749 PetscErrorCode ierr; 1750 ierr = PetscFree(*(void**)array);CHKERRQ(ierr); 1751 } 1752 PetscFunctionReturn(0); 1753 } 1754 1755 /*@C 
1756 ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block 1757 1758 Not Collective 1759 1760 Input Arguments: 1761 . ltog - local to global mapping 1762 1763 Output Arguments: 1764 . array - array of indices 1765 1766 Level: advanced 1767 1768 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices() 1769 @*/ 1770 PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1771 { 1772 PetscFunctionBegin; 1773 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1774 PetscValidPointer(array,2); 1775 *array = ltog->indices; 1776 PetscFunctionReturn(0); 1777 } 1778 1779 /*@C 1780 ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices() 1781 1782 Not Collective 1783 1784 Input Arguments: 1785 + ltog - local to global mapping 1786 - array - array of indices 1787 1788 Level: advanced 1789 1790 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices() 1791 @*/ 1792 PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1793 { 1794 PetscFunctionBegin; 1795 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1796 PetscValidPointer(array,2); 1797 if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer"); 1798 *array = NULL; 1799 PetscFunctionReturn(0); 1800 } 1801 1802 /*@C 1803 ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings 1804 1805 Not Collective 1806 1807 Input Arguments: 1808 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate 1809 . n - number of mappings to concatenate 1810 - ltogs - local to global mappings 1811 1812 Output Arguments: 1813 . 
ltogcat - new mapping 1814 1815 Note: this currently always returns a mapping with block size of 1 1816 1817 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1818 1819 Level: advanced 1820 1821 .seealso: ISLocalToGlobalMappingCreate() 1822 @*/ 1823 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat) 1824 { 1825 PetscInt i,cnt,m,*idx; 1826 PetscErrorCode ierr; 1827 1828 PetscFunctionBegin; 1829 if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n); 1830 if (n > 0) PetscValidPointer(ltogs,3); 1831 for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3); 1832 PetscValidPointer(ltogcat,4); 1833 for (cnt=0,i=0; i<n; i++) { 1834 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1835 cnt += m; 1836 } 1837 ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr); 1838 for (cnt=0,i=0; i<n; i++) { 1839 const PetscInt *subidx; 1840 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1841 ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1842 ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr); 1843 ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1844 cnt += m; 1845 } 1846 ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr); 1847 PetscFunctionReturn(0); 1848 } 1849 1850 /*MC 1851 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1852 used this is good for only small and moderate size problems. 1853 1854 Options Database Keys: 1855 . 
-islocaltoglobalmapping_type basic - select this method 1856 1857 Level: beginner 1858 1859 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH 1860 M*/ 1861 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog) 1862 { 1863 PetscFunctionBegin; 1864 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Basic; 1865 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Basic; 1866 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic; 1867 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Basic; 1868 PetscFunctionReturn(0); 1869 } 1870 1871 /*MC 1872 ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1873 used this is good for large memory problems. 1874 1875 Options Database Keys: 1876 . -islocaltoglobalmapping_type hash - select this method 1877 1878 1879 Notes: 1880 This is selected automatically for large problems if the user does not set the type. 
1881 1882 Level: beginner 1883 1884 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH 1885 M*/ 1886 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog) 1887 { 1888 PetscFunctionBegin; 1889 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Hash; 1890 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Hash; 1891 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash; 1892 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Hash; 1893 PetscFunctionReturn(0); 1894 } 1895 1896 1897 /*@C 1898 ISLocalToGlobalMappingRegister - Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping 1899 1900 Not Collective 1901 1902 Input Parameters: 1903 + sname - name of a new method 1904 - routine_create - routine to create method context 1905 1906 Notes: 1907 ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings. 
1908 1909 Sample usage: 1910 .vb 1911 ISLocalToGlobalMappingRegister("my_mapper",MyCreate); 1912 .ve 1913 1914 Then, your mapping can be chosen with the procedural interface via 1915 $ ISLocalToGlobalMappingSetType(ltog,"my_mapper") 1916 or at runtime via the option 1917 $ -islocaltoglobalmapping_type my_mapper 1918 1919 Level: advanced 1920 1921 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH 1922 1923 @*/ 1924 PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping)) 1925 { 1926 PetscErrorCode ierr; 1927 1928 PetscFunctionBegin; 1929 ierr = ISInitializePackage();CHKERRQ(ierr); 1930 ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr); 1931 PetscFunctionReturn(0); 1932 } 1933 1934 /*@C 1935 ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach. 1936 1937 Logically Collective on ISLocalToGlobalMapping 1938 1939 Input Parameters: 1940 + ltog - the ISLocalToGlobalMapping object 1941 - type - a known method 1942 1943 Options Database Key: 1944 . -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list 1945 of available methods (for instance, basic or hash) 1946 1947 Notes: 1948 See "petsc/include/petscis.h" for available methods 1949 1950 Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and 1951 then set the ISLocalToGlobalMapping type from the options database rather than by using 1952 this routine. 1953 1954 Level: intermediate 1955 1956 Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they 1957 are accessed by ISLocalToGlobalMappingSetType(). 
1958 1959 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate() 1960 1961 @*/ 1962 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) 1963 { 1964 PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping); 1965 PetscBool match; 1966 1967 PetscFunctionBegin; 1968 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1969 PetscValidCharPointer(type,2); 1970 1971 ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr); 1972 if (match) PetscFunctionReturn(0); 1973 1974 ierr = PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr); 1975 if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type); 1976 /* Destroy the previous private LTOG context */ 1977 if (ltog->ops->destroy) { 1978 ierr = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr); 1979 ltog->ops->destroy = NULL; 1980 } 1981 ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr); 1982 ierr = (*r)(ltog);CHKERRQ(ierr); 1983 PetscFunctionReturn(0); 1984 } 1985 1986 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; 1987 1988 /*@C 1989 ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package. 
1990 1991 Not Collective 1992 1993 Level: advanced 1994 1995 .seealso: ISRegister(), ISLocalToGlobalRegister() 1996 @*/ 1997 PetscErrorCode ISLocalToGlobalMappingRegisterAll(void) 1998 { 1999 PetscErrorCode ierr; 2000 2001 PetscFunctionBegin; 2002 if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0); 2003 ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE; 2004 2005 ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr); 2006 ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr); 2007 PetscFunctionReturn(0); 2008 } 2009