1 2 #include <petsc/private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petsc/private/hashmapi.h> 4 #include <petscsf.h> 5 #include <petscviewer.h> 6 7 PetscClassId IS_LTOGM_CLASSID; 8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***); 9 10 typedef struct { 11 PetscInt *globals; 12 } ISLocalToGlobalMapping_Basic; 13 14 typedef struct { 15 PetscHMapI globalht; 16 } ISLocalToGlobalMapping_Hash; 17 18 19 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 20 { 21 PetscInt numCells, step = 1; 22 PetscBool isStride; 23 PetscErrorCode ierr; 24 25 PetscFunctionBeginHot; 26 *pStart = 0; 27 *points = NULL; 28 ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr); 29 ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr); 30 if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);} 31 *pEnd = *pStart + numCells; 32 if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);} 33 PetscFunctionReturn(0); 34 } 35 36 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 37 { 38 PetscInt step = 1; 39 PetscBool isStride; 40 PetscErrorCode ierr; 41 42 PetscFunctionBeginHot; 43 ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr); 44 if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);} 45 if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);} 46 PetscFunctionReturn(0); 47 } 48 49 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points) 50 { 51 PetscErrorCode ierr; 52 53 PetscFunctionBeginHot; 54 if (points) { 55 ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr); 56 ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr); 57 } else { 58 ierr = 
ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr); 59 ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr); 60 } 61 PetscFunctionReturn(0); 62 } 63 64 /* -----------------------------------------------------------------------------------------*/ 65 66 /* 67 Creates the global mapping information in the ISLocalToGlobalMapping structure 68 69 If the user has not selected how to handle the global to local mapping then use HASH for "large" problems 70 */ 71 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping) 72 { 73 PetscInt i,*idx = mapping->indices,n = mapping->n,end,start; 74 PetscErrorCode ierr; 75 76 PetscFunctionBegin; 77 if (mapping->data) PetscFunctionReturn(0); 78 end = 0; 79 start = PETSC_MAX_INT; 80 81 for (i=0; i<n; i++) { 82 if (idx[i] < 0) continue; 83 if (idx[i] < start) start = idx[i]; 84 if (idx[i] > end) end = idx[i]; 85 } 86 if (start > end) {start = 0; end = -1;} 87 mapping->globalstart = start; 88 mapping->globalend = end; 89 if (!((PetscObject)mapping)->type_name) { 90 if ((end - start) > PetscMax(4*n,1000000)) { 91 ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr); 92 } else { 93 ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr); 94 } 95 } 96 ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr); 97 PetscFunctionReturn(0); 98 } 99 100 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping) 101 { 102 PetscErrorCode ierr; 103 PetscInt i,*idx = mapping->indices,n = mapping->n,end,start,*globals; 104 ISLocalToGlobalMapping_Basic *map; 105 106 PetscFunctionBegin; 107 start = mapping->globalstart; 108 end = mapping->globalend; 109 ierr = PetscNew(&map);CHKERRQ(ierr); 110 ierr = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr); 111 map->globals = globals; 112 for (i=0; i<end-start+1; i++) globals[i] = -1; 113 for (i=0; i<n; i++) { 114 if (idx[i] < 0) continue; 115 globals[idx[i] - 
start] = i; 116 } 117 mapping->data = (void*)map; 118 ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr); 119 PetscFunctionReturn(0); 120 } 121 122 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping) 123 { 124 PetscErrorCode ierr; 125 PetscInt i,*idx = mapping->indices,n = mapping->n; 126 ISLocalToGlobalMapping_Hash *map; 127 128 PetscFunctionBegin; 129 ierr = PetscNew(&map);CHKERRQ(ierr); 130 ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr); 131 for (i=0; i<n; i++ ) { 132 if (idx[i] < 0) continue; 133 ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr); 134 } 135 mapping->data = (void*)map; 136 ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr); 137 PetscFunctionReturn(0); 138 } 139 140 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping) 141 { 142 PetscErrorCode ierr; 143 ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data; 144 145 PetscFunctionBegin; 146 if (!map) PetscFunctionReturn(0); 147 ierr = PetscFree(map->globals);CHKERRQ(ierr); 148 ierr = PetscFree(mapping->data);CHKERRQ(ierr); 149 PetscFunctionReturn(0); 150 } 151 152 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping) 153 { 154 PetscErrorCode ierr; 155 ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash*)mapping->data; 156 157 PetscFunctionBegin; 158 if (!map) PetscFunctionReturn(0); 159 ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr); 160 ierr = PetscFree(mapping->data);CHKERRQ(ierr); 161 PetscFunctionReturn(0); 162 } 163 164 #define GTOLTYPE _Basic 165 #define GTOLNAME _Basic 166 #define GTOLBS mapping->bs 167 #define GTOL(g, local) do { \ 168 local = map->globals[g/bs - start]; \ 169 if (local >= 0) local = bs*local + (g % bs); \ 170 } while (0) 171 172 #include <../src/vec/is/utils/isltog.h> 173 174 #define GTOLTYPE _Basic 175 #define GTOLNAME 
Block_Basic 176 #define GTOLBS 1 177 #define GTOL(g, local) do { \ 178 local = map->globals[g - start]; \ 179 } while (0) 180 #include <../src/vec/is/utils/isltog.h> 181 182 #define GTOLTYPE _Hash 183 #define GTOLNAME _Hash 184 #define GTOLBS mapping->bs 185 #define GTOL(g, local) do { \ 186 (void)PetscHMapIGet(map->globalht,g/bs,&local); \ 187 if (local >= 0) local = bs*local + (g % bs); \ 188 } while (0) 189 #include <../src/vec/is/utils/isltog.h> 190 191 #define GTOLTYPE _Hash 192 #define GTOLNAME Block_Hash 193 #define GTOLBS 1 194 #define GTOL(g, local) do { \ 195 (void)PetscHMapIGet(map->globalht,g,&local); \ 196 } while (0) 197 #include <../src/vec/is/utils/isltog.h> 198 199 /*@ 200 ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object 201 202 Not Collective 203 204 Input Parameter: 205 . ltog - local to global mapping 206 207 Output Parameter: 208 . nltog - the duplicated local to global mapping 209 210 Level: advanced 211 212 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 213 @*/ 214 PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog) 215 { 216 PetscErrorCode ierr; 217 218 PetscFunctionBegin; 219 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 220 ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr); 221 PetscFunctionReturn(0); 222 } 223 224 /*@ 225 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 226 227 Not Collective 228 229 Input Parameter: 230 . ltog - local to global mapping 231 232 Output Parameter: 233 . 
n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length 234 235 Level: advanced 236 237 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 238 @*/ 239 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n) 240 { 241 PetscFunctionBegin; 242 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 243 PetscValidIntPointer(n,2); 244 *n = mapping->bs*mapping->n; 245 PetscFunctionReturn(0); 246 } 247 248 /*@C 249 ISLocalToGlobalMappingViewFromOptions - View from Options 250 251 Collective on ISLocalToGlobalMapping 252 253 Input Parameters: 254 + A - the local to global mapping object 255 . obj - Optional object 256 - name - command line option 257 258 Level: intermediate 259 .seealso: ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate() 260 @*/ 261 PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[]) 262 { 263 PetscErrorCode ierr; 264 265 PetscFunctionBegin; 266 PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1); 267 ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr); 268 PetscFunctionReturn(0); 269 } 270 271 /*@C 272 ISLocalToGlobalMappingView - View a local to global mapping 273 274 Not Collective 275 276 Input Parameters: 277 + ltog - local to global mapping 278 - viewer - viewer 279 280 Level: advanced 281 282 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 283 @*/ 284 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer) 285 { 286 PetscInt i; 287 PetscMPIInt rank; 288 PetscBool iascii; 289 PetscErrorCode ierr; 290 291 PetscFunctionBegin; 292 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 293 if (!viewer) { 294 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr); 295 } 296 
PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 297 298 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr); 299 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 300 if (iascii) { 301 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr); 302 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 303 for (i=0; i<mapping->n; i++) { 304 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr); 305 } 306 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 307 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 308 } 309 PetscFunctionReturn(0); 310 } 311 312 /*@ 313 ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n) 314 ordering and a global parallel ordering. 315 316 Not collective 317 318 Input Parameter: 319 . is - index set containing the global numbers for each local number 320 321 Output Parameter: 322 . 
mapping - new mapping data structure 323 324 Notes: 325 the block size of the IS determines the block size of the mapping 326 Level: advanced 327 328 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions() 329 @*/ 330 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping) 331 { 332 PetscErrorCode ierr; 333 PetscInt n,bs; 334 const PetscInt *indices; 335 MPI_Comm comm; 336 PetscBool isblock; 337 338 PetscFunctionBegin; 339 PetscValidHeaderSpecific(is,IS_CLASSID,1); 340 PetscValidPointer(mapping,2); 341 342 ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr); 343 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 344 ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr); 345 if (!isblock) { 346 ierr = ISGetIndices(is,&indices);CHKERRQ(ierr); 347 ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 348 ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr); 349 } else { 350 ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr); 351 ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr); 352 ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 353 ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr); 354 } 355 PetscFunctionReturn(0); 356 } 357 358 /*@C 359 ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) 360 ordering and a global parallel ordering. 361 362 Collective 363 364 Input Parameter: 365 + sf - star forest mapping contiguous local indices to (rank, offset) 366 - start - first global index on this process 367 368 Output Parameter: 369 . 
mapping - new mapping data structure 370 371 Level: advanced 372 373 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions() 374 @*/ 375 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping) 376 { 377 PetscErrorCode ierr; 378 PetscInt i,maxlocal,nroots,nleaves,*globals,*ltog; 379 const PetscInt *ilocal; 380 MPI_Comm comm; 381 382 PetscFunctionBegin; 383 PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1); 384 PetscValidPointer(mapping,3); 385 386 ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr); 387 ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr); 388 if (ilocal) { 389 for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1); 390 } 391 else maxlocal = nleaves; 392 ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr); 393 ierr = PetscMalloc1(maxlocal,<og);CHKERRQ(ierr); 394 for (i=0; i<nroots; i++) globals[i] = start + i; 395 for (i=0; i<maxlocal; i++) ltog[i] = -1; 396 ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr); 397 ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr); 398 ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr); 399 ierr = PetscFree(globals);CHKERRQ(ierr); 400 PetscFunctionReturn(0); 401 } 402 403 /*@ 404 ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping 405 406 Not collective 407 408 Input Parameters: 409 + mapping - mapping data structure 410 - bs - the blocksize 411 412 Level: advanced 413 414 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 415 @*/ 416 PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs) 417 { 418 PetscInt *nid; 419 const PetscInt *oid; 420 PetscInt i,cn,on,obs,nn; 421 PetscErrorCode ierr; 422 423 PetscFunctionBegin; 424 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 
425 if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs); 426 if (bs == mapping->bs) PetscFunctionReturn(0); 427 on = mapping->n; 428 obs = mapping->bs; 429 oid = mapping->indices; 430 nn = (on*obs)/bs; 431 if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on); 432 433 ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr); 434 ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr); 435 for (i=0;i<nn;i++) { 436 PetscInt j; 437 for (j=0,cn=0;j<bs-1;j++) { 438 if (oid[i*bs+j] < 0) { cn++; continue; } 439 if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]); 440 } 441 if (oid[i*bs+j] < 0) cn++; 442 if (cn) { 443 if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn); 444 nid[i] = -1; 445 } else { 446 nid[i] = oid[i*bs]/bs; 447 } 448 } 449 ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr); 450 451 mapping->n = nn; 452 mapping->bs = bs; 453 ierr = PetscFree(mapping->indices);CHKERRQ(ierr); 454 mapping->indices = nid; 455 mapping->globalstart = 0; 456 mapping->globalend = 0; 457 458 /* reset the cached information */ 459 ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr); 460 ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr); 461 if (mapping->info_indices) { 462 PetscInt i; 463 464 ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr); 465 for (i=1; i<mapping->info_nproc; i++) { 466 ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr); 467 } 468 ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr); 469 } 470 mapping->info_cached = PETSC_FALSE; 471 472 if (mapping->ops->destroy) { 473 ierr = 
(*mapping->ops->destroy)(mapping);CHKERRQ(ierr); 474 } 475 PetscFunctionReturn(0); 476 } 477 478 /*@ 479 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 480 ordering and a global parallel ordering. 481 482 Not Collective 483 484 Input Parameters: 485 . mapping - mapping data structure 486 487 Output Parameter: 488 . bs - the blocksize 489 490 Level: advanced 491 492 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 493 @*/ 494 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs) 495 { 496 PetscFunctionBegin; 497 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 498 *bs = mapping->bs; 499 PetscFunctionReturn(0); 500 } 501 502 /*@ 503 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 504 ordering and a global parallel ordering. 505 506 Not Collective, but communicator may have more than one process 507 508 Input Parameters: 509 + comm - MPI communicator 510 . bs - the block size 511 . n - the number of local elements divided by the block size, or equivalently the number of block indices 512 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 513 - mode - see PetscCopyMode 514 515 Output Parameter: 516 . mapping - new mapping data structure 517 518 Notes: 519 There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 520 521 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 522 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 
523 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 524 525 Level: advanced 526 527 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH 528 ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType 529 @*/ 530 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping) 531 { 532 PetscErrorCode ierr; 533 PetscInt *in; 534 535 PetscFunctionBegin; 536 if (n) PetscValidIntPointer(indices,3); 537 PetscValidPointer(mapping,4); 538 539 *mapping = NULL; 540 ierr = ISInitializePackage();CHKERRQ(ierr); 541 542 ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS", 543 comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr); 544 (*mapping)->n = n; 545 (*mapping)->bs = bs; 546 (*mapping)->info_cached = PETSC_FALSE; 547 (*mapping)->info_free = PETSC_FALSE; 548 (*mapping)->info_procs = NULL; 549 (*mapping)->info_numprocs = NULL; 550 (*mapping)->info_indices = NULL; 551 (*mapping)->info_nodec = NULL; 552 (*mapping)->info_nodei = NULL; 553 554 (*mapping)->ops->globaltolocalmappingapply = NULL; 555 (*mapping)->ops->globaltolocalmappingapplyblock = NULL; 556 (*mapping)->ops->destroy = NULL; 557 if (mode == PETSC_COPY_VALUES) { 558 ierr = PetscMalloc1(n,&in);CHKERRQ(ierr); 559 ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr); 560 (*mapping)->indices = in; 561 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 562 } else if (mode == PETSC_OWN_POINTER) { 563 (*mapping)->indices = (PetscInt*)indices; 564 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 565 } 566 else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently 
use PETSC_USE_POINTER"); 567 PetscFunctionReturn(0); 568 } 569 570 PetscFunctionList ISLocalToGlobalMappingList = NULL; 571 572 573 /*@ 574 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 575 576 Not collective 577 578 Input Parameters: 579 . mapping - mapping data structure 580 581 Level: advanced 582 583 @*/ 584 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) 585 { 586 PetscErrorCode ierr; 587 char type[256]; 588 ISLocalToGlobalMappingType defaulttype = "Not set"; 589 PetscBool flg; 590 591 PetscFunctionBegin; 592 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 593 ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr); 594 ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr); 595 ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr); 596 if (flg) { 597 ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr); 598 } 599 ierr = PetscOptionsEnd();CHKERRQ(ierr); 600 PetscFunctionReturn(0); 601 } 602 603 /*@ 604 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 605 ordering and a global parallel ordering. 606 607 Note Collective 608 609 Input Parameters: 610 . 
mapping - mapping data structure 611 612 Level: advanced 613 614 .seealso: ISLocalToGlobalMappingCreate() 615 @*/ 616 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) 617 { 618 PetscErrorCode ierr; 619 620 PetscFunctionBegin; 621 if (!*mapping) PetscFunctionReturn(0); 622 PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1); 623 if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);} 624 ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr); 625 ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr); 626 ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr); 627 if ((*mapping)->info_indices) { 628 PetscInt i; 629 630 ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr); 631 for (i=1; i<(*mapping)->info_nproc; i++) { 632 ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr); 633 } 634 ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr); 635 } 636 if ((*mapping)->info_nodei) { 637 ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr); 638 } 639 ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr); 640 if ((*mapping)->ops->destroy) { 641 ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr); 642 } 643 ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr); 644 *mapping = 0; 645 PetscFunctionReturn(0); 646 } 647 648 /*@ 649 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 650 a new index set using the global numbering defined in an ISLocalToGlobalMapping 651 context. 652 653 Collective on is 654 655 Input Parameters: 656 + mapping - mapping between local and global numbering 657 - is - index set in local numbering 658 659 Output Parameters: 660 . newis - index set in global numbering 661 662 Notes: 663 The output IS will have the same communicator of the input IS. 
664 665 Level: advanced 666 667 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(), 668 ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply() 669 @*/ 670 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis) 671 { 672 PetscErrorCode ierr; 673 PetscInt n,*idxout; 674 const PetscInt *idxin; 675 676 PetscFunctionBegin; 677 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 678 PetscValidHeaderSpecific(is,IS_CLASSID,2); 679 PetscValidPointer(newis,3); 680 681 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 682 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 683 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 684 ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr); 685 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 686 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 687 PetscFunctionReturn(0); 688 } 689 690 /*@ 691 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 692 and converts them to the global numbering. 693 694 Not collective 695 696 Input Parameters: 697 + mapping - the local to global mapping context 698 . N - number of integers 699 - in - input indices in local numbering 700 701 Output Parameter: 702 . out - indices in global numbering 703 704 Notes: 705 The in and out array parameters may be identical. 
706 707 Level: advanced 708 709 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 710 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 711 AOPetscToApplication(), ISGlobalToLocalMappingApply() 712 713 @*/ 714 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 715 { 716 PetscInt i,bs,Nmax; 717 718 PetscFunctionBegin; 719 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 720 bs = mapping->bs; 721 Nmax = bs*mapping->n; 722 if (bs == 1) { 723 const PetscInt *idx = mapping->indices; 724 for (i=0; i<N; i++) { 725 if (in[i] < 0) { 726 out[i] = in[i]; 727 continue; 728 } 729 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 730 out[i] = idx[in[i]]; 731 } 732 } else { 733 const PetscInt *idx = mapping->indices; 734 for (i=0; i<N; i++) { 735 if (in[i] < 0) { 736 out[i] = in[i]; 737 continue; 738 } 739 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 740 out[i] = idx[in[i]/bs]*bs + (in[i] % bs); 741 } 742 } 743 PetscFunctionReturn(0); 744 } 745 746 /*@ 747 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 748 749 Not collective 750 751 Input Parameters: 752 + mapping - the local to global mapping context 753 . N - number of integers 754 - in - input indices in local block numbering 755 756 Output Parameter: 757 . out - indices in global block numbering 758 759 Notes: 760 The in and out array parameters may be identical. 761 762 Example: 763 If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0 764 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 
765 766 Level: advanced 767 768 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 769 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 770 AOPetscToApplication(), ISGlobalToLocalMappingApply() 771 772 @*/ 773 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 774 { 775 776 PetscFunctionBegin; 777 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 778 { 779 PetscInt i,Nmax = mapping->n; 780 const PetscInt *idx = mapping->indices; 781 782 for (i=0; i<N; i++) { 783 if (in[i] < 0) { 784 out[i] = in[i]; 785 continue; 786 } 787 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i); 788 out[i] = idx[in[i]]; 789 } 790 } 791 PetscFunctionReturn(0); 792 } 793 794 /*@ 795 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 796 specified with a global numbering. 797 798 Not collective 799 800 Input Parameters: 801 + mapping - mapping between local and global numbering 802 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 803 IS_GTOLM_DROP - drops the indices with no local value from the output list 804 . n - number of global indices to map 805 - idx - global indices to map 806 807 Output Parameters: 808 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 809 - idxout - local index of each global index, one must pass in an array long enough 810 to hold all the indices. You can call ISGlobalToLocalMappingApply() with 811 idxout == NULL to determine the required length (returned in nout) 812 and then allocate the required space and call ISGlobalToLocalMappingApply() 813 a second time to set the values. 814 815 Notes: 816 Either nout or idxout may be NULL. idx and idxout may be identical. 
817 818 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 819 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 820 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 821 822 Level: advanced 823 824 Developer Note: The manual page states that idx and idxout may be identical but the calling 825 sequence declares idx as const so it cannot be the same as idxout. 826 827 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(), 828 ISLocalToGlobalMappingDestroy() 829 @*/ 830 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 831 { 832 PetscErrorCode ierr; 833 834 PetscFunctionBegin; 835 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 836 if (!mapping->data) { 837 ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr); 838 } 839 ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr); 840 PetscFunctionReturn(0); 841 } 842 843 /*@ 844 ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering 845 a new index set using the local numbering defined in an ISLocalToGlobalMapping 846 context. 847 848 Not collective 849 850 Input Parameters: 851 + mapping - mapping between local and global numbering 852 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 853 IS_GTOLM_DROP - drops the indices with no local value from the output list 854 - is - index set in global numbering 855 856 Output Parameters: 857 . 
newis - index set in local numbering 858 859 Notes: 860 The output IS will be sequential, as it encodes a purely local operation 861 862 Level: advanced 863 864 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 865 ISLocalToGlobalMappingDestroy() 866 @*/ 867 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis) 868 { 869 PetscErrorCode ierr; 870 PetscInt n,nout,*idxout; 871 const PetscInt *idxin; 872 873 PetscFunctionBegin; 874 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 875 PetscValidHeaderSpecific(is,IS_CLASSID,3); 876 PetscValidPointer(newis,4); 877 878 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 879 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 880 if (type == IS_GTOLM_MASK) { 881 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 882 } else { 883 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr); 884 ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr); 885 } 886 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr); 887 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 888 ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 889 PetscFunctionReturn(0); 890 } 891 892 /*@ 893 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 894 specified with a block global numbering. 895 896 Not collective 897 898 Input Parameters: 899 + mapping - mapping between local and global numbering 900 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 901 IS_GTOLM_DROP - drops the indices with no local value from the output list 902 . 
n - number of global indices to map 903 - idx - global indices to map 904 905 Output Parameters: 906 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 907 - idxout - local index of each global index, one must pass in an array long enough 908 to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with 909 idxout == NULL to determine the required length (returned in nout) 910 and then allocate the required space and call ISGlobalToLocalMappingApplyBlock() 911 a second time to set the values. 912 913 Notes: 914 Either nout or idxout may be NULL. idx and idxout may be identical. 915 916 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 917 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 918 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 919 920 Level: advanced 921 922 Developer Note: The manual page states that idx and idxout may be identical but the calling 923 sequence declares idx as const so it cannot be the same as idxout. 
924 925 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 926 ISLocalToGlobalMappingDestroy() 927 @*/ 928 PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, 929 PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 930 { 931 PetscErrorCode ierr; 932 933 PetscFunctionBegin; 934 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 935 if (!mapping->data) { 936 ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr); 937 } 938 ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr); 939 PetscFunctionReturn(0); 940 } 941 942 943 /*@C 944 ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and 945 each index shared by more than one processor 946 947 Collective on ISLocalToGlobalMapping 948 949 Input Parameters: 950 . mapping - the mapping from local to global indexing 951 952 Output Parameter: 953 + nproc - number of processors that are connected to this one 954 . proc - neighboring processors 955 . numproc - number of indices for each subdomain (processor) 956 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 957 958 Level: advanced 959 960 Fortran Usage: 961 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 962 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 963 PetscInt indices[nproc][numprocmax],ierr) 964 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 965 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 
.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* the (collective) computation is done at most once; afterwards the cached arrays are returned */
  if (mapping->info_cached) {
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   Computes, via a three-phase MPI rendezvous, which ranks share which of this rank's
   local (block) indices, caching the result on the mapping.  Phase 1: each rank sends
   its (global index, local index) pairs to the rank that "globally owns" that index
   under a uniform partition of [0,Ng).  Phase 2: the owners count the sharers of each
   index and send back, for each nontrivially shared index, the list of sharing ranks.
   Phase 3: the lists are regrouped per neighboring rank and sorted by global numbering.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor case: nothing is shared; cache a trivial (empty) answer */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
        numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
        boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
        local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + global maximum index; defines the uniform ownership partition */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* each rank globally owns indices [rstart, rstart+ng); the last rank gets the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node; nprocs[2*p] counts indices owned by rank p,
     nprocs[2*p+1] is a 0/1 flag meaning "we will send rank p a message" */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1; /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows; each incoming message holds up to nmax (global,local) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners: (global index, local index) pairs */
  ierr = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[] since the packing loop above advanced it */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages; dest[] remembers which rank each send went to */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives; source[]/len[] record the sender and pair-count of each message */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced by the fill loop above; rebuild it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node; nprocs[] is reused, now indexed by rank */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);

  /* allocate the output arrays; bprocs[] maps a rank to its slot in the compressed lists */
  *nproc = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.
numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* info_free is set by ISLocalToGlobalMappingGetInfo() when bs > 1: in that case
     numprocs and indices were freshly allocated for the caller and must be freed
     here (procs still points at the cached block info and is NOT freed) */
  if (mapping->info_free) {
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt i;

      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (i=1; i<*nproc; i++) {
        ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* otherwise the arrays are cached on the mapping and freed when it is destroyed;
     in both cases invalidate the caller's pointers */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* the neighbor information is computed (and cached) in terms of blocks */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    /* each shared block b expands to the bs point indices bs*b, ..., bs*b+bs-1 */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* tell ISLocalToGlobalMappingRestoreBlockInfo() that numprocs/indices were
       allocated here and must be freed on restore */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* block size one: the cached block arrays are returned directly */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.
mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* validate the mapping first, consistent with ISLocalToGlobalMappingGetInfo()
     and ISLocalToGlobalMappingRestoreBlockInfo() */
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* the block and point info share the same restore logic: ISLocalToGlobalMappingGetInfo()
     sets mapping->info_free when it allocated expanded (bs > 1) arrays, and
     ISLocalToGlobalMappingRestoreBlockInfo() frees accordingly */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) {
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec[0..n-1] will hold the number of processes per local node (with a
       sentinel slot at n); info_nodei[i] are row pointers into a single allocation */
    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count one entry per node (for this process) plus one per sharing
       neighbor; subdomain 0 in the info arrays is this process itself, hence i starts at 1 */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m    = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* one contiguous allocation of total size m, partitioned CSR-style among the nodes */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    /* second pass: fill the lists; assumes neigh[0] is this process's own rank and
       that n_neigh > 0 whenever n > 0 -- TODO confirm against GetBlockInfo ordering */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* a node can appear several times for the same neighbor (e.g. through multiple
       blocks); sort each list and drop duplicates, shrinking the counts */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* the arrays stay cached on the mapping (freed when it is destroyed);
     only the caller's pointers are invalidated here */
  if (nnodes) *nnodes = 0;
  if (count) *count = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

    Not Collective

    Input Arguments:
.   ltog - local to global mapping

    Output Arguments:
.
array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize() 1629 1630 Level: advanced 1631 1632 Notes: 1633 ISLocalToGlobalMappingGetSize() returns the length the this array 1634 1635 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices() 1636 @*/ 1637 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1638 { 1639 PetscFunctionBegin; 1640 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1641 PetscValidPointer(array,2); 1642 if (ltog->bs == 1) { 1643 *array = ltog->indices; 1644 } else { 1645 PetscInt *jj,k,i,j,n = ltog->n, bs = ltog->bs; 1646 const PetscInt *ii; 1647 PetscErrorCode ierr; 1648 1649 ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr); 1650 *array = jj; 1651 k = 0; 1652 ii = ltog->indices; 1653 for (i=0; i<n; i++) 1654 for (j=0; j<bs; j++) 1655 jj[k++] = bs*ii[i] + j; 1656 } 1657 PetscFunctionReturn(0); 1658 } 1659 1660 /*@C 1661 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices() 1662 1663 Not Collective 1664 1665 Input Arguments: 1666 + ltog - local to global mapping 1667 - array - array of indices 1668 1669 Level: advanced 1670 1671 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices() 1672 @*/ 1673 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1674 { 1675 PetscFunctionBegin; 1676 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1677 PetscValidPointer(array,2); 1678 if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer"); 1679 1680 if (ltog->bs > 1) { 1681 PetscErrorCode ierr; 1682 ierr = PetscFree(*(void**)array);CHKERRQ(ierr); 1683 } 1684 PetscFunctionReturn(0); 1685 } 1686 1687 /*@C 
ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
.  ltog - local to global mapping

   Output Arguments:
.  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand out the internal block-index array directly; no copy is made, so the
     caller must return exactly this pointer to RestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the caller must hand back exactly the pointer obtained from GetBlockIndices() */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

   Not Collective

   Input Arguments:
+  comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
.  n - number of mappings to concatenate
-  ltogs - local to global mappings

   Output Arguments:
.
ltogcat - new mapping 1746 1747 Note: this currently always returns a mapping with block size of 1 1748 1749 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1750 1751 Level: advanced 1752 1753 .seealso: ISLocalToGlobalMappingCreate() 1754 @*/ 1755 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat) 1756 { 1757 PetscInt i,cnt,m,*idx; 1758 PetscErrorCode ierr; 1759 1760 PetscFunctionBegin; 1761 if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n); 1762 if (n > 0) PetscValidPointer(ltogs,3); 1763 for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3); 1764 PetscValidPointer(ltogcat,4); 1765 for (cnt=0,i=0; i<n; i++) { 1766 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1767 cnt += m; 1768 } 1769 ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr); 1770 for (cnt=0,i=0; i<n; i++) { 1771 const PetscInt *subidx; 1772 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1773 ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1774 ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr); 1775 ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1776 cnt += m; 1777 } 1778 ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr); 1779 PetscFunctionReturn(0); 1780 } 1781 1782 /*MC 1783 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1784 used this is good for only small and moderate size problems. 1785 1786 Options Database Keys: 1787 . 
-islocaltoglobalmapping_type basic - select this method 1788 1789 Level: beginner 1790 1791 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH 1792 M*/ 1793 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog) 1794 { 1795 PetscFunctionBegin; 1796 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Basic; 1797 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Basic; 1798 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic; 1799 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Basic; 1800 PetscFunctionReturn(0); 1801 } 1802 1803 /*MC 1804 ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1805 used this is good for large memory problems. 1806 1807 Options Database Keys: 1808 . -islocaltoglobalmapping_type hash - select this method 1809 1810 1811 Notes: 1812 This is selected automatically for large problems if the user does not set the type. 

   Level: beginner

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  /* install the hash-table global-to-local kernels and their destructor */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}


/*@C
    ISLocalToGlobalMappingRegister - Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

   Not Collective

   Input Parameters:
+  sname - name of a new method
-  function - routine to create method context

   Notes:
   ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

   Sample usage:
.vb
   ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
.ve

   Then, your mapping can be chosen with the procedural interface via
$     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
   or at runtime via the option
$     -islocaltoglobalmapping_type my_mapper

   Level: advanced

.seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH

@*/
PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure the IS package (and hence the function list) is initialized before adding to it */
  ierr = ISInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.

   Logically Collective on ISLocalToGlobalMapping

   Input Parameters:
+  ltog - the ISLocalToGlobalMapping object
-  type - a known method

   Options Database Key:
.  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
    of available methods (for instance, basic or hash)

   Notes:
   See "petsc/include/petscis.h" for available methods

  Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
  then set the ISLocalToGlobalMapping type from the options database rather than by using
  this routine.

  Level: intermediate

  Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
  are accessed by ISLocalToGlobalMappingSetType().
1890 1891 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate() 1892 1893 @*/ 1894 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) 1895 { 1896 PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping); 1897 PetscBool match; 1898 1899 PetscFunctionBegin; 1900 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1901 PetscValidCharPointer(type,2); 1902 1903 ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr); 1904 if (match) PetscFunctionReturn(0); 1905 1906 ierr = PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr); 1907 if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type); 1908 /* Destroy the previous private LTOG context */ 1909 if (ltog->ops->destroy) { 1910 ierr = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr); 1911 ltog->ops->destroy = NULL; 1912 } 1913 ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr); 1914 ierr = (*r)(ltog);CHKERRQ(ierr); 1915 PetscFunctionReturn(0); 1916 } 1917 1918 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; 1919 1920 /*@C 1921 ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package. 

  Not Collective

  Level: advanced

.seealso: ISRegister(), ISLocalToGlobalMappingRegister()
@*/
PetscErrorCode ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* run at most once per program; subsequent calls are no-ops */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
