1 2 #include <petsc/private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petsc/private/hash.h> 4 #include <petscsf.h> 5 #include <petscviewer.h> 6 7 PetscClassId IS_LTOGM_CLASSID; 8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***); 9 10 typedef struct { 11 PetscInt *globals; 12 } ISLocalToGlobalMapping_Basic; 13 14 typedef struct { 15 PetscHashI globalht; 16 } ISLocalToGlobalMapping_Hash; 17 18 19 /* -----------------------------------------------------------------------------------------*/ 20 21 /* 22 Creates the global mapping information in the ISLocalToGlobalMapping structure 23 24 If the user has not selected how to handle the global to local mapping then use HASH for "large" problems 25 */ 26 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping) 27 { 28 PetscInt i,*idx = mapping->indices,n = mapping->n,end,start; 29 PetscErrorCode ierr; 30 31 PetscFunctionBegin; 32 if (mapping->data) PetscFunctionReturn(0); 33 end = 0; 34 start = PETSC_MAX_INT; 35 36 for (i=0; i<n; i++) { 37 if (idx[i] < 0) continue; 38 if (idx[i] < start) start = idx[i]; 39 if (idx[i] > end) end = idx[i]; 40 } 41 if (start > end) {start = 0; end = -1;} 42 mapping->globalstart = start; 43 mapping->globalend = end; 44 if (!((PetscObject)mapping)->type_name) { 45 if ((end - start) > PetscMax(4*n,1000000)) { 46 ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr); 47 } else { 48 ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr); 49 } 50 } 51 ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr); 52 PetscFunctionReturn(0); 53 } 54 55 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping) 56 { 57 PetscErrorCode ierr; 58 PetscInt i,*idx = mapping->indices,n = mapping->n,end,start,*globals; 59 ISLocalToGlobalMapping_Basic *map; 60 61 PetscFunctionBegin; 62 start = 
mapping->globalstart; 63 end = mapping->globalend; 64 ierr = PetscNew(&map);CHKERRQ(ierr); 65 ierr = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr); 66 map->globals = globals; 67 for (i=0; i<end-start+1; i++) globals[i] = -1; 68 for (i=0; i<n; i++) { 69 if (idx[i] < 0) continue; 70 globals[idx[i] - start] = i; 71 } 72 mapping->data = (void*)map; 73 ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr); 74 PetscFunctionReturn(0); 75 } 76 77 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping) 78 { 79 PetscErrorCode ierr; 80 PetscInt i,*idx = mapping->indices,n = mapping->n; 81 ISLocalToGlobalMapping_Hash *map; 82 83 PetscFunctionBegin; 84 ierr = PetscNew(&map);CHKERRQ(ierr); 85 PetscHashICreate(map->globalht); 86 for (i=0; i<n; i++ ) { 87 if (idx[i] < 0) continue; 88 PetscHashIAdd(map->globalht, idx[i], i); 89 } 90 mapping->data = (void*)map; 91 ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr); 92 PetscFunctionReturn(0); 93 } 94 95 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping) 96 { 97 PetscErrorCode ierr; 98 ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data; 99 100 PetscFunctionBegin; 101 if (!map) PetscFunctionReturn(0); 102 ierr = PetscFree(map->globals);CHKERRQ(ierr); 103 ierr = PetscFree(mapping->data);CHKERRQ(ierr); 104 PetscFunctionReturn(0); 105 } 106 107 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping) 108 { 109 PetscErrorCode ierr; 110 ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash*)mapping->data; 111 112 PetscFunctionBegin; 113 if (!map) PetscFunctionReturn(0); 114 PetscHashIDestroy(map->globalht); 115 ierr = PetscFree(mapping->data);CHKERRQ(ierr); 116 PetscFunctionReturn(0); 117 } 118 119 #define GTOLTYPE _Basic 120 #define GTOLNAME _Basic 121 #define GTOLBS mapping->bs 122 #define GTOL(g, local) do { \ 123 local 
= map->globals[g/bs - start]; \ 124 local = bs*local + (g % bs); \ 125 } while (0) 126 127 #include <../src/vec/is/utils/isltog.h> 128 129 #define GTOLTYPE _Basic 130 #define GTOLNAME Block_Basic 131 #define GTOLBS 1 132 #define GTOL(g, local) do { \ 133 local = map->globals[g - start]; \ 134 } while (0) 135 #include <../src/vec/is/utils/isltog.h> 136 137 #define GTOLTYPE _Hash 138 #define GTOLNAME _Hash 139 #define GTOLBS mapping->bs 140 #define GTOL(g, local) do { \ 141 PetscHashIMap(map->globalht,g/bs,local); \ 142 local = bs*local + (g % bs); \ 143 } while (0) 144 #include <../src/vec/is/utils/isltog.h> 145 146 #define GTOLTYPE _Hash 147 #define GTOLNAME Block_Hash 148 #define GTOLBS 1 149 #define GTOL(g, local) do { \ 150 PetscHashIMap(map->globalht,g,local); \ 151 } while (0) 152 #include <../src/vec/is/utils/isltog.h> 153 154 /*@ 155 ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object 156 157 Not Collective 158 159 Input Parameter: 160 . ltog - local to global mapping 161 162 Output Parameter: 163 . nltog - the duplicated local to global mapping 164 165 Level: advanced 166 167 Concepts: mapping^local to global 168 169 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 170 @*/ 171 PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog) 172 { 173 PetscErrorCode ierr; 174 175 PetscFunctionBegin; 176 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 177 ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr); 178 PetscFunctionReturn(0); 179 } 180 181 /*@ 182 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 183 184 Not Collective 185 186 Input Parameter: 187 . ltog - local to global mapping 188 189 Output Parameter: 190 . 
n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length 191 192 Level: advanced 193 194 Concepts: mapping^local to global 195 196 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 197 @*/ 198 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n) 199 { 200 PetscFunctionBegin; 201 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 202 PetscValidIntPointer(n,2); 203 *n = mapping->bs*mapping->n; 204 PetscFunctionReturn(0); 205 } 206 207 /*@C 208 ISLocalToGlobalMappingView - View a local to global mapping 209 210 Not Collective 211 212 Input Parameters: 213 + ltog - local to global mapping 214 - viewer - viewer 215 216 Level: advanced 217 218 Concepts: mapping^local to global 219 220 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 221 @*/ 222 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer) 223 { 224 PetscInt i; 225 PetscMPIInt rank; 226 PetscBool iascii; 227 PetscErrorCode ierr; 228 229 PetscFunctionBegin; 230 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 231 if (!viewer) { 232 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr); 233 } 234 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 235 236 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr); 237 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 238 if (iascii) { 239 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr); 240 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 241 for (i=0; i<mapping->n; i++) { 242 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr); 243 } 244 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 245 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 246 } 247 
PetscFunctionReturn(0); 248 } 249 250 /*@ 251 ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n) 252 ordering and a global parallel ordering. 253 254 Not collective 255 256 Input Parameter: 257 . is - index set containing the global numbers for each local number 258 259 Output Parameter: 260 . mapping - new mapping data structure 261 262 Notes: the block size of the IS determines the block size of the mapping 263 Level: advanced 264 265 Concepts: mapping^local to global 266 267 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions() 268 @*/ 269 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping) 270 { 271 PetscErrorCode ierr; 272 PetscInt n,bs; 273 const PetscInt *indices; 274 MPI_Comm comm; 275 PetscBool isblock; 276 277 PetscFunctionBegin; 278 PetscValidHeaderSpecific(is,IS_CLASSID,1); 279 PetscValidPointer(mapping,2); 280 281 ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr); 282 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 283 ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr); 284 if (!isblock) { 285 ierr = ISGetIndices(is,&indices);CHKERRQ(ierr); 286 ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 287 ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr); 288 } else { 289 ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr); 290 ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr); 291 ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 292 ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr); 293 } 294 PetscFunctionReturn(0); 295 } 296 297 /*@C 298 ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) 299 ordering and a global parallel ordering. 
300 301 Collective 302 303 Input Parameter: 304 + sf - star forest mapping contiguous local indices to (rank, offset) 305 - start - first global index on this process 306 307 Output Parameter: 308 . mapping - new mapping data structure 309 310 Level: advanced 311 312 Concepts: mapping^local to global 313 314 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions() 315 @*/ 316 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping) 317 { 318 PetscErrorCode ierr; 319 PetscInt i,maxlocal,nroots,nleaves,*globals,*ltog; 320 const PetscInt *ilocal; 321 MPI_Comm comm; 322 323 PetscFunctionBegin; 324 PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1); 325 PetscValidPointer(mapping,3); 326 327 ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr); 328 ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr); 329 if (ilocal) { 330 for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1); 331 } 332 else maxlocal = nleaves; 333 ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr); 334 ierr = PetscMalloc1(maxlocal,<og);CHKERRQ(ierr); 335 for (i=0; i<nroots; i++) globals[i] = start + i; 336 for (i=0; i<maxlocal; i++) ltog[i] = -1; 337 ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr); 338 ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr); 339 ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr); 340 ierr = PetscFree(globals);CHKERRQ(ierr); 341 PetscFunctionReturn(0); 342 } 343 344 /*@ 345 ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping 346 347 Not collective 348 349 Input Parameters: 350 . mapping - mapping data structure 351 . 
bs - the blocksize 352 353 Level: advanced 354 355 Concepts: mapping^local to global 356 357 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 358 @*/ 359 PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs) 360 { 361 PetscInt *nid; 362 const PetscInt *oid; 363 PetscInt i,cn,on,obs,nn; 364 PetscErrorCode ierr; 365 366 PetscFunctionBegin; 367 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 368 if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs); 369 if (bs == mapping->bs) PetscFunctionReturn(0); 370 on = mapping->n; 371 obs = mapping->bs; 372 oid = mapping->indices; 373 nn = (on*obs)/bs; 374 if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on); 375 376 ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr); 377 ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr); 378 for (i=0;i<nn;i++) { 379 PetscInt j; 380 for (j=0,cn=0;j<bs-1;j++) { 381 if (oid[i*bs+j] < 0) { cn++; continue; } 382 if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]); 383 } 384 if (oid[i*bs+j] < 0) cn++; 385 if (cn) { 386 if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn); 387 nid[i] = -1; 388 } else { 389 nid[i] = oid[i*bs]/bs; 390 } 391 } 392 ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr); 393 394 mapping->n = nn; 395 mapping->bs = bs; 396 ierr = PetscFree(mapping->indices);CHKERRQ(ierr); 397 mapping->indices = nid; 398 mapping->globalstart = 0; 399 mapping->globalend = 0; 400 if (mapping->ops->destroy) { 401 ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr); 402 } 403 
PetscFunctionReturn(0); 404 } 405 406 /*@ 407 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 408 ordering and a global parallel ordering. 409 410 Not Collective 411 412 Input Parameters: 413 . mapping - mapping data structure 414 415 Output Parameter: 416 . bs - the blocksize 417 418 Level: advanced 419 420 Concepts: mapping^local to global 421 422 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 423 @*/ 424 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs) 425 { 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 428 *bs = mapping->bs; 429 PetscFunctionReturn(0); 430 } 431 432 /*@ 433 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 434 ordering and a global parallel ordering. 435 436 Not Collective, but communicator may have more than one process 437 438 Input Parameters: 439 + comm - MPI communicator 440 . bs - the block size 441 . n - the number of local elements divided by the block size, or equivalently the number of block indices 442 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 443 - mode - see PetscCopyMode 444 445 Output Parameter: 446 . mapping - new mapping data structure 447 448 Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 449 450 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 451 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 
452 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 453 454 Level: advanced 455 456 Concepts: mapping^local to global 457 458 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH 459 ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType 460 @*/ 461 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping) 462 { 463 PetscErrorCode ierr; 464 PetscInt *in; 465 466 PetscFunctionBegin; 467 if (n) PetscValidIntPointer(indices,3); 468 PetscValidPointer(mapping,4); 469 470 *mapping = NULL; 471 ierr = ISInitializePackage();CHKERRQ(ierr); 472 473 ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS", 474 comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr); 475 (*mapping)->n = n; 476 (*mapping)->bs = bs; 477 (*mapping)->info_cached = PETSC_FALSE; 478 (*mapping)->info_free = PETSC_FALSE; 479 (*mapping)->info_procs = NULL; 480 (*mapping)->info_numprocs = NULL; 481 (*mapping)->info_indices = NULL; 482 483 (*mapping)->ops->globaltolocalmappingapply = NULL; 484 (*mapping)->ops->globaltolocalmappingapplyblock = NULL; 485 (*mapping)->ops->destroy = NULL; 486 if (mode == PETSC_COPY_VALUES) { 487 ierr = PetscMalloc1(n,&in);CHKERRQ(ierr); 488 ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr); 489 (*mapping)->indices = in; 490 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 491 } else if (mode == PETSC_OWN_POINTER) { 492 (*mapping)->indices = (PetscInt*)indices; 493 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 494 } 495 else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use 
PETSC_USE_POINTER"); 496 PetscFunctionReturn(0); 497 } 498 499 PetscFunctionList ISLocalToGlobalMappingList = NULL; 500 501 502 /*@ 503 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 504 505 Not collective 506 507 Input Parameters: 508 . mapping - mapping data structure 509 510 Level: advanced 511 512 @*/ 513 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) 514 { 515 PetscErrorCode ierr; 516 char type[256]; 517 ISLocalToGlobalMappingType defaulttype = "Not set"; 518 PetscBool flg; 519 520 PetscFunctionBegin; 521 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 522 ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr); 523 ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr); 524 ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr); 525 if (flg) { 526 ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr); 527 } 528 ierr = PetscOptionsEnd();CHKERRQ(ierr); 529 PetscFunctionReturn(0); 530 } 531 532 /*@ 533 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 534 ordering and a global parallel ordering. 535 536 Note Collective 537 538 Input Parameters: 539 . 
mapping - mapping data structure 540 541 Level: advanced 542 543 .seealso: ISLocalToGlobalMappingCreate() 544 @*/ 545 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) 546 { 547 PetscErrorCode ierr; 548 549 PetscFunctionBegin; 550 if (!*mapping) PetscFunctionReturn(0); 551 PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1); 552 if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);} 553 ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr); 554 ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr); 555 ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr); 556 if ((*mapping)->info_indices) { 557 PetscInt i; 558 559 ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr); 560 for (i=1; i<(*mapping)->info_nproc; i++) { 561 ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr); 562 } 563 ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr); 564 } 565 if ((*mapping)->ops->destroy) { 566 ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr); 567 } 568 ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr); 569 *mapping = 0; 570 PetscFunctionReturn(0); 571 } 572 573 /*@ 574 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 575 a new index set using the global numbering defined in an ISLocalToGlobalMapping 576 context. 577 578 Not collective 579 580 Input Parameters: 581 + mapping - mapping between local and global numbering 582 - is - index set in local numbering 583 584 Output Parameters: 585 . 
newis - index set in global numbering 586 587 Level: advanced 588 589 Concepts: mapping^local to global 590 591 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(), 592 ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply() 593 @*/ 594 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis) 595 { 596 PetscErrorCode ierr; 597 PetscInt n,*idxout; 598 const PetscInt *idxin; 599 600 PetscFunctionBegin; 601 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 602 PetscValidHeaderSpecific(is,IS_CLASSID,2); 603 PetscValidPointer(newis,3); 604 605 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 606 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 607 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 608 ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr); 609 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 610 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 611 PetscFunctionReturn(0); 612 } 613 614 /*@ 615 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 616 and converts them to the global numbering. 617 618 Not collective 619 620 Input Parameters: 621 + mapping - the local to global mapping context 622 . N - number of integers 623 - in - input indices in local numbering 624 625 Output Parameter: 626 . out - indices in global numbering 627 628 Notes: 629 The in and out array parameters may be identical. 
630 631 Level: advanced 632 633 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 634 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 635 AOPetscToApplication(), ISGlobalToLocalMappingApply() 636 637 Concepts: mapping^local to global 638 @*/ 639 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 640 { 641 PetscInt i,bs,Nmax; 642 643 PetscFunctionBegin; 644 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 645 bs = mapping->bs; 646 Nmax = bs*mapping->n; 647 if (bs == 1) { 648 const PetscInt *idx = mapping->indices; 649 for (i=0; i<N; i++) { 650 if (in[i] < 0) { 651 out[i] = in[i]; 652 continue; 653 } 654 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 655 out[i] = idx[in[i]]; 656 } 657 } else { 658 const PetscInt *idx = mapping->indices; 659 for (i=0; i<N; i++) { 660 if (in[i] < 0) { 661 out[i] = in[i]; 662 continue; 663 } 664 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 665 out[i] = idx[in[i]/bs]*bs + (in[i] % bs); 666 } 667 } 668 PetscFunctionReturn(0); 669 } 670 671 /*@ 672 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 673 674 Not collective 675 676 Input Parameters: 677 + mapping - the local to global mapping context 678 . N - number of integers 679 - in - input indices in local block numbering 680 681 Output Parameter: 682 . out - indices in global block numbering 683 684 Notes: 685 The in and out array parameters may be identical. 
686 687 Example: 688 If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0 689 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 690 691 Level: advanced 692 693 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 694 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 695 AOPetscToApplication(), ISGlobalToLocalMappingApply() 696 697 Concepts: mapping^local to global 698 @*/ 699 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 700 { 701 702 PetscFunctionBegin; 703 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 704 { 705 PetscInt i,Nmax = mapping->n; 706 const PetscInt *idx = mapping->indices; 707 708 for (i=0; i<N; i++) { 709 if (in[i] < 0) { 710 out[i] = in[i]; 711 continue; 712 } 713 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i); 714 out[i] = idx[in[i]]; 715 } 716 } 717 PetscFunctionReturn(0); 718 } 719 720 /*@ 721 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 722 specified with a global numbering. 723 724 Not collective 725 726 Input Parameters: 727 + mapping - mapping between local and global numbering 728 . type - IS_GTOLM_MASK - replaces global indices with no local value with -1 729 IS_GTOLM_DROP - drops the indices with no local value from the output list 730 . n - number of global indices to map 731 - idx - global indices to map 732 733 Output Parameters: 734 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 735 - idxout - local index of each global index, one must pass in an array long enough 736 to hold all the indices. 
You can call ISGlobalToLocalMappingApply() with 737 idxout == NULL to determine the required length (returned in nout) 738 and then allocate the required space and call ISGlobalToLocalMappingApply() 739 a second time to set the values. 740 741 Notes: 742 Either nout or idxout may be NULL. idx and idxout may be identical. 743 744 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 745 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 746 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 747 748 Level: advanced 749 750 Developer Note: The manual page states that idx and idxout may be identical but the calling 751 sequence declares idx as const so it cannot be the same as idxout. 752 753 Concepts: mapping^global to local 754 755 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(), 756 ISLocalToGlobalMappingDestroy() 757 @*/ 758 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 759 { 760 PetscErrorCode ierr; 761 762 PetscFunctionBegin; 763 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 764 if (!mapping->data) { 765 ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr); 766 } 767 ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr); 768 PetscFunctionReturn(0); 769 } 770 771 /*@ 772 ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering 773 a new index set using the local numbering defined in an ISLocalToGlobalMapping 774 context. 
775 776 Not collective 777 778 Input Parameters: 779 + mapping - mapping between local and global numbering 780 . type - IS_GTOLM_MASK - replaces global indices with no local value with -1 781 IS_GTOLM_DROP - drops the indices with no local value from the output list 782 - is - index set in global numbering 783 784 Output Parameters: 785 . newis - index set in local numbering 786 787 Level: advanced 788 789 Concepts: mapping^local to global 790 791 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 792 ISLocalToGlobalMappingDestroy() 793 @*/ 794 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis) 795 { 796 PetscErrorCode ierr; 797 PetscInt n,nout,*idxout; 798 const PetscInt *idxin; 799 800 PetscFunctionBegin; 801 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 802 PetscValidHeaderSpecific(is,IS_CLASSID,3); 803 PetscValidPointer(newis,4); 804 805 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 806 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 807 if (type == IS_GTOLM_MASK) { 808 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 809 } else { 810 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr); 811 ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr); 812 } 813 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr); 814 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 815 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 816 PetscFunctionReturn(0); 817 } 818 819 /*@ 820 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 821 specified with a block global numbering. 822 823 Not collective 824 825 Input Parameters: 826 + mapping - mapping between local and global numbering 827 . 
type - IS_GTOLM_MASK - replaces global indices with no local value with -1 828 IS_GTOLM_DROP - drops the indices with no local value from the output list 829 . n - number of global indices to map 830 - idx - global indices to map 831 832 Output Parameters: 833 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 834 - idxout - local index of each global index, one must pass in an array long enough 835 to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with 836 idxout == NULL to determine the required length (returned in nout) 837 and then allocate the required space and call ISGlobalToLocalMappingApplyBlock() 838 a second time to set the values. 839 840 Notes: 841 Either nout or idxout may be NULL. idx and idxout may be identical. 842 843 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 844 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 845 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 846 847 Level: advanced 848 849 Developer Note: The manual page states that idx and idxout may be identical but the calling 850 sequence declares idx as const so it cannot be the same as idxout. 
851 852 Concepts: mapping^global to local 853 854 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 855 ISLocalToGlobalMappingDestroy() 856 @*/ 857 PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, 858 PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 859 { 860 PetscErrorCode ierr; 861 862 PetscFunctionBegin; 863 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 864 if (!mapping->data) { 865 ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr); 866 } 867 ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr); 868 PetscFunctionReturn(0); 869 } 870 871 872 /*@C 873 ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and 874 each index shared by more than one processor 875 876 Collective on ISLocalToGlobalMapping 877 878 Input Parameters: 879 . mapping - the mapping from local to global indexing 880 881 Output Parameter: 882 + nproc - number of processors that are connected to this one 883 . proc - neighboring processors 884 . numproc - number of indices for each subdomain (processor) 885 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 886 887 Level: advanced 888 889 Concepts: mapping^local to global 890 891 Fortran Usage: 892 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 893 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 894 PetscInt indices[nproc][numprocmax],ierr) 895 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 896 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_cached) {
    /* a previous call stored the answer on the mapping; hand back the cached arrays
       (they remain owned by the mapping, see the info_free logic in RestoreBlockInfo) */
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    /* first call: do the parallel communication and cache the result */
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   Computes, for every neighboring process, which local block indices are shared with it.

   Algorithm (three communication rounds over a temporary block-cyclic-like partition of
   the global index range [0,Ng) into chunks of size "scale"):
     1. each process sends every (global index, local index) pair it holds to the process
        that "globally owns" that index in the temporary partition (tag1);
     2. the owners count how many processes touch each index, and send back the message
        *lengths* of the reply (tag2);
     3. the owners send back, for each shared ("nontrivial") index, the local index, the
        number of sharers, and the list of sharing ranks (tag3).
   The receiver then buckets its local indices by sharing rank, sorts each bucket by
   global number, moves its own rank to slot 0, and caches everything on the mapping.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: nothing is shared; return empty (but allocated) arrays so that
       the Restore/caching logic can treat this case uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + global maximum index; defines the temporary ownership partition below */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* each rank temporarily "owns" the contiguous chunk [rstart, rstart+ng) of global indices */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = #indices owned by rank p; nprocs[2*p+1] = 1 iff a message goes to p */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* messages are (global index, local index) pairs, hence the factor of 2 */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop consumed starts[]; rebuild the offsets for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  /* nownedsenders[g-rstart] = number of ranks whose subdomain contains global index g */
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* count pairs, not ints */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill loop above advanced starts[]; recompute it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* ----------------------------------- */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* round 2/3 reverses the communication pattern: one reply per round-1 receive */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* ----------------------------------- */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* ----------------------------------- */

  /* count number subdomains for each local node */
  /* nprocs is reused here with a new meaning: nprocs[p] = #shared indices with rank p */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs maps a rank to its slot in the compressed output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* ----------------------------------- */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* ----------------------------------- */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.
  numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* numprocs/indices are only freed when they were freshly allocated for the caller
     (info_free is set by ISLocalToGlobalMappingGetInfo() in its bs > 1 expansion);
     otherwise they alias the arrays cached on the mapping and stay owned by it.
     procs is never freed here: in the info_free case it still aliases the cached
     info_procs array. */
  if (mapping->info_free) {
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt i;

      /* entry 0 may have been swapped with another slot by the "self first" reordering,
         but slots 0..*nproc-1 together still cover every allocated array exactly once */
      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (i=1; i<*nproc; i++) {
        ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* invalidate the caller's handles in all cases */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.
  numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* start from the block-level neighbor info (possibly served from the mapping's cache) */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    /* each shared block b expands to the bs point indices bs*b .. bs*b+bs-1;
       numprocs/indices are freshly allocated here while *procs still aliases the
       cached array -- ISLocalToGlobalMappingRestoreBlockInfo() relies on exactly
       this split when info_free is set */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* NOTE(review): info_free is a mapping-wide flag; interleaving GetInfo (bs>1)
       with GetBlockInfo/RestoreBlockInfo on the same mapping looks unsafe -- confirm */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* bs == 1: block info and point info coincide; hand back the cached arrays */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* point info and block info share the same ownership rules, so the block restore suffices */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
.
array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize() 1469 1470 Level: advanced 1471 1472 Notes: ISLocalToGlobalMappingGetSize() returns the length the this array 1473 1474 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices() 1475 @*/ 1476 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1477 { 1478 PetscFunctionBegin; 1479 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1480 PetscValidPointer(array,2); 1481 if (ltog->bs == 1) { 1482 *array = ltog->indices; 1483 } else { 1484 PetscInt *jj,k,i,j,n = ltog->n, bs = ltog->bs; 1485 const PetscInt *ii; 1486 PetscErrorCode ierr; 1487 1488 ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr); 1489 *array = jj; 1490 k = 0; 1491 ii = ltog->indices; 1492 for (i=0; i<n; i++) 1493 for (j=0; j<bs; j++) 1494 jj[k++] = bs*ii[i] + j; 1495 } 1496 PetscFunctionReturn(0); 1497 } 1498 1499 /*@C 1500 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices() 1501 1502 Not Collective 1503 1504 Input Arguments: 1505 + ltog - local to global mapping 1506 - array - array of indices 1507 1508 Level: advanced 1509 1510 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices() 1511 @*/ 1512 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1513 { 1514 PetscFunctionBegin; 1515 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1516 PetscValidPointer(array,2); 1517 if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer"); 1518 1519 if (ltog->bs > 1) { 1520 PetscErrorCode ierr; 1521 ierr = PetscFree(*(void**)array);CHKERRQ(ierr); 1522 } 1523 PetscFunctionReturn(0); 1524 } 1525 1526 /*@C 1527 
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
. array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand out a borrowed (not copied) pointer to the mapping's internal block indices;
     the caller must return the identical pointer via RestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+ ltog - local to global mapping
- array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the Get call lent out the internal array, so the exact same pointer must come back */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* nothing to free (borrowed pointer); just invalidate the caller's handle */
  *array = NULL;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

   Not Collective

   Input Arguments:
+  comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
.  n - number of mappings to concatenate
-  ltogs - local to global mappings

   Output Arguments:
.
ltogcat - new mapping 1585 1586 Note: this currently always returns a mapping with block size of 1 1587 1588 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1589 1590 Level: advanced 1591 1592 .seealso: ISLocalToGlobalMappingCreate() 1593 @*/ 1594 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat) 1595 { 1596 PetscInt i,cnt,m,*idx; 1597 PetscErrorCode ierr; 1598 1599 PetscFunctionBegin; 1600 if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n); 1601 if (n > 0) PetscValidPointer(ltogs,3); 1602 for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3); 1603 PetscValidPointer(ltogcat,4); 1604 for (cnt=0,i=0; i<n; i++) { 1605 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1606 cnt += m; 1607 } 1608 ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr); 1609 for (cnt=0,i=0; i<n; i++) { 1610 const PetscInt *subidx; 1611 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1612 ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1613 ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr); 1614 ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1615 cnt += m; 1616 } 1617 ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr); 1618 PetscFunctionReturn(0); 1619 } 1620 1621 /*MC 1622 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1623 used this is good for only small and moderate size problems. 

   Options Database Keys:
.   -islocaltoglobalmapping_type basic - select this method

   Level: beginner

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
M*/

/* Install the function table for the BASIC type; the Apply/SetUp/ApplyBlock
   implementations are defined elsewhere in this file (presumably generated by the
   GTOL* macro machinery near the top -- verify against the full file) */
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}

/*MC
      ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
                                    used this is good for large memory problems.

   Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method


   Notes: This is selected automatically for large problems if the user does not set the type.
1651 1652 Level: beginner 1653 1654 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH 1655 M*/ 1656 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog) 1657 { 1658 PetscFunctionBegin; 1659 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Hash; 1660 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Hash; 1661 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash; 1662 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Hash; 1663 PetscFunctionReturn(0); 1664 } 1665 1666 1667 /*@C 1668 ISLocalToGlobalMappingRegister - Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping 1669 1670 Not Collective 1671 1672 Input Parameters: 1673 + sname - name of a new method 1674 - routine_create - routine to create method context 1675 1676 Notes: 1677 ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings. 
1678 1679 Sample usage: 1680 .vb 1681 ISLocalToGlobalMappingRegister("my_mapper",MyCreate); 1682 .ve 1683 1684 Then, your mapping can be chosen with the procedural interface via 1685 $ ISLocalToGlobalMappingSetType(ltog,"my_mapper") 1686 or at runtime via the option 1687 $ -islocaltoglobalmapping_type my_mapper 1688 1689 Level: advanced 1690 1691 .keywords: ISLocalToGlobalMappingType, register 1692 1693 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH 1694 1695 @*/ 1696 PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping)) 1697 { 1698 PetscErrorCode ierr; 1699 1700 PetscFunctionBegin; 1701 ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr); 1702 PetscFunctionReturn(0); 1703 } 1704 1705 /*@C 1706 ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach. 1707 1708 Logically Collective on ISLocalToGlobalMapping 1709 1710 Input Parameters: 1711 + ltog - the ISLocalToGlobalMapping object 1712 - type - a known method 1713 1714 Options Database Key: 1715 . -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list 1716 of available methods (for instance, basic or hash) 1717 1718 Notes: 1719 See "petsc/include/petscis.h" for available methods 1720 1721 Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and 1722 then set the ISLocalToGlobalMapping type from the options database rather than by using 1723 this routine. 1724 1725 Level: intermediate 1726 1727 Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they 1728 are accessed by ISLocalToGlobalMappingSetType(). 
1729 1730 .keywords: ISLocalToGlobalMapping, set, method 1731 1732 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate() 1733 1734 @*/ 1735 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) 1736 { 1737 PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping); 1738 PetscBool match; 1739 1740 PetscFunctionBegin; 1741 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1742 PetscValidCharPointer(type,2); 1743 1744 ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr); 1745 if (match) PetscFunctionReturn(0); 1746 1747 ierr = PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr); 1748 if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type); 1749 /* Destroy the previous private LTOG context */ 1750 if (ltog->ops->destroy) { 1751 ierr = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr); 1752 ltog->ops->destroy = NULL; 1753 } 1754 ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr); 1755 ierr = (*r)(ltog);CHKERRQ(ierr); 1756 PetscFunctionReturn(0); 1757 } 1758 1759 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; 1760 1761 /*@C 1762 ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package. 
1763 1764 Not Collective 1765 1766 Level: advanced 1767 1768 .keywords: IS, register, all 1769 .seealso: ISRegister(), ISLocalToGlobalRegister() 1770 @*/ 1771 PetscErrorCode ISLocalToGlobalMappingRegisterAll(void) 1772 { 1773 PetscErrorCode ierr; 1774 1775 PetscFunctionBegin; 1776 if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0); 1777 ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE; 1778 1779 ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr); 1780 ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr); 1781 PetscFunctionReturn(0); 1782 } 1783 1784