#include <petsc/private/isimpl.h> /*I "petscis.h"  I*/
#include <petscsf.h>
#include <petscviewer.h>

PetscClassId IS_LTOGM_CLASSID;
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

/*@
   ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

   Not Collective

   Input Parameter:
.  ltog - local to global mapping

   Output Parameter:
.  nltog - the duplicated local to global mapping

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* PETSC_COPY_VALUES: the duplicate gets its own copy of the index array */
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

   Not Collective

   Input Parameter:
.  ltog - local to global mapping

   Output Parameter:
.  n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* size is reported in individual entries: block count times block size */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingView - View a local to global mapping

   Not Collective

   Input Parameters:
+  ltog - local to global mapping
-  viewer - viewer

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* default to the communicator's stdout viewer when none is supplied */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing keeps per-rank output in rank order */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not collective

   Input Parameter:
.  is - index set containing the global numbers for each local number

   Output Parameter:
.  mapping - new mapping data structure

   Notes: the block size of the IS determines the block size of the mapping

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  if (!isblock) {
    /* generic IS: every entry is its own block (bs = 1) */
    ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  } else {
    /* block IS: inherit its block size; n counts entries, so n/bs blocks */
    ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
    ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Collective

   Input Parameter:
+  sf - star forest mapping contiguous local indices to (rank, offset)
-  start - first global index on this process

   Output Parameter:
.
mapping - new mapping data structure

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (ilocal) {
    /* explicit leaf locations: the local space must cover the largest leaf index */
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  } else maxlocal = nleaves; /* NULL ilocal means leaves are contiguous 0..nleaves-1 */
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  /* fixed: "&ltog" had been corrupted into the HTML entity "&lt;og", which does not compile */
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i; /* this rank owns the contiguous global range [start,start+nroots) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;         /* local slots with no leaf stay -1 (unmapped) */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* the mapping takes ownership of ltog (PETSC_OWN_POINTER); only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

   Not collective

   Input Parameters:
.  mapping - mapping data structure
.
bs - the blocksize

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  /* total entry count on*obs must regroup exactly into blocks of the new size */
  nn  = (on*obs)/bs;
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    for (j=0,cn=0;j<bs-1;j++) {
      /* count negative (unset) entries; non-negative entries of a block must be consecutive */
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: account for the last entry of the block */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* a block must be entirely negative (maps to -1) or entirely valid */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n  = nn;
  mapping->bs = bs;
  ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices = nid;
  /* the cached global-to-local data depends on bs, so it is invalidated here */
  ierr = PetscFree(mapping->globals);CHKERRQ(ierr);
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  PetscFunctionReturn(0);
}

/*@
ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 261 ordering and a global parallel ordering. 262 263 Not Collective 264 265 Input Parameters: 266 . mapping - mapping data structure 267 268 Output Parameter: 269 . bs - the blocksize 270 271 Level: advanced 272 273 Concepts: mapping^local to global 274 275 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 276 @*/ 277 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs) 278 { 279 PetscFunctionBegin; 280 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 281 *bs = mapping->bs; 282 PetscFunctionReturn(0); 283 } 284 285 /*@ 286 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 287 ordering and a global parallel ordering. 288 289 Not Collective, but communicator may have more than one process 290 291 Input Parameters: 292 + comm - MPI communicator 293 . bs - the block size 294 . n - the number of local elements divided by the block size, or equivalently the number of block indices 295 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 296 - mode - see PetscCopyMode 297 298 Output Parameter: 299 . 
mapping - new mapping data structure 300 301 Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 302 Level: advanced 303 304 Concepts: mapping^local to global 305 306 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions() 307 @*/ 308 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping) 309 { 310 PetscErrorCode ierr; 311 PetscInt *in; 312 313 PetscFunctionBegin; 314 if (n) PetscValidIntPointer(indices,3); 315 PetscValidPointer(mapping,4); 316 317 *mapping = NULL; 318 ierr = ISInitializePackage();CHKERRQ(ierr); 319 320 ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS", 321 comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr); 322 (*mapping)->n = n; 323 (*mapping)->bs = bs; 324 (*mapping)->info_cached = PETSC_FALSE; 325 (*mapping)->info_free = PETSC_FALSE; 326 (*mapping)->info_procs = NULL; 327 (*mapping)->info_numprocs = NULL; 328 (*mapping)->info_indices = NULL; 329 /* 330 Do not create the global to local mapping. 
This is only created if 331 ISGlobalToLocalMapping() is called 332 */ 333 (*mapping)->globals = 0; 334 (*mapping)->globalht = 0; 335 (*mapping)->use_hash_table = PETSC_FALSE; 336 ierr = ISLocalToGlobalMappingSetFromOptions(*mapping);CHKERRQ(ierr); 337 if (mode == PETSC_COPY_VALUES) { 338 ierr = PetscMalloc1(n,&in);CHKERRQ(ierr); 339 ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr); 340 (*mapping)->indices = in; 341 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 342 } else if (mode == PETSC_OWN_POINTER) { 343 (*mapping)->indices = (PetscInt*)indices; 344 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 345 } 346 else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER"); 347 ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr); 348 PetscFunctionReturn(0); 349 } 350 351 /*@ 352 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 353 354 Not collective 355 356 Input Parameters: 357 . mapping - mapping data structure 358 359 Options Database: 360 . 
-ltogm_use_hash_table true,false see ISLocalToGlobalMappingSetUseHashTable() 361 362 Level: advanced 363 364 .seealso: ISLocalToGlobalMappingSetUseHashTable() 365 @*/ 366 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) 367 { 368 PetscErrorCode ierr; 369 PetscBool flg; 370 371 PetscFunctionBegin; 372 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 373 374 ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr); 375 376 ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&flg);CHKERRQ(ierr); 377 ierr = PetscOptionsBool("-ltogm_use_hash_table", "use hash table (memory scalable for global to local in ISLtoGMappings","ISLocalToGlobalMappingSetUseHashTable",flg,&flg,NULL);CHKERRQ(ierr); 378 ierr = ISLocalToGlobalMappingSetUseHashTable(mapping,flg);CHKERRQ(ierr); 379 ierr = PetscOptionsEnd();CHKERRQ(ierr); 380 PetscFunctionReturn(0); 381 } 382 383 /*@ 384 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 385 ordering and a global parallel ordering. 386 387 Note Collective 388 389 Input Parameters: 390 . 
mapping - mapping data structure 391 392 Level: advanced 393 394 .seealso: ISLocalToGlobalMappingCreate() 395 @*/ 396 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) 397 { 398 PetscErrorCode ierr; 399 400 PetscFunctionBegin; 401 if (!*mapping) PetscFunctionReturn(0); 402 PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1); 403 if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);} 404 ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr); 405 ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr); 406 ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr); 407 ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr); 408 PetscHashIDestroy((*mapping)->globalht); 409 if ((*mapping)->info_indices) { 410 PetscInt i; 411 412 ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr); 413 for (i=1; i<(*mapping)->info_nproc; i++) { 414 ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr); 415 } 416 ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr); 417 } 418 ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr); 419 *mapping = 0; 420 PetscFunctionReturn(0); 421 } 422 423 /*@ 424 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 425 a new index set using the global numbering defined in an ISLocalToGlobalMapping 426 context. 427 428 Not collective 429 430 Input Parameters: 431 + mapping - mapping between local and global numbering 432 - is - index set in local numbering 433 434 Output Parameters: 435 . 
newis - index set in global numbering

   Level: advanced

   Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,2);
  PetscValidPointer(newis,3);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Level: advanced

.seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

   Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt i,bs,Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  bs   = mapping->bs;
  Nmax = bs*mapping->n; /* total number of local entries (blocks times block size) */
  if (bs == 1) {
    /* fast path: one entry per block, direct table lookup */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        /* negative (unset) indices pass through unchanged */
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      /* look up the block, then add the offset of the entry inside the block */
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}

/*@
   ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local block numbering

   Output Parameter:
.  out - indices in global block numbering

   Notes:
   The in and out array parameters may be identical.

   Example:
   If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
   (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

   Level: advanced

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

   Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  {
    PetscInt       i,Nmax = mapping->n; /* block count: block indices range over [0,n) */
    const PetscInt *idx = mapping->indices;

    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        /* negative (unset) indices pass through unchanged */
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  }
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------------*/

/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    either a hash table (global index -> local index) or a flat array
    covering the range [globalstart, globalend].
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  PetscBool      use_hash_table;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&use_hash_table);CHKERRQ(ierr);
  /* already set up: nothing to do */
  if (use_hash_table && mapping->globalht) PetscFunctionReturn(0);
  else if (mapping->globals) PetscFunctionReturn(0);
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the range [start,end] of valid (non-negative) global indices */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end) end = idx[i];
  }
  if (start > end) {start = 0; end = -1;} /* no valid indices at all */
  mapping->globalstart = start;
  mapping->globalend   = end;

  if (mapping->use_hash_table) {
    PetscHashICreate(mapping->globalht);
    for (i=0; i<n; i++ ) {
      if (idx[i] < 0) continue;
      PetscHashIAdd(mapping->globalht, idx[i], i);
    }
    /* approximate memory accounting: one key + one value per entry */
    ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
  } else {
    /* flat inverse table over the whole global range; -1 marks "no local index" */
    ierr = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
    mapping->globals = globals;
    for (i=0; i<end-start+1; i++) globals[i] = -1;
    for (i=0; i<n; i++) {
      if (idx[i] < 0) continue;
      globals[idx[i] - start] = i;
    }

    ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   ISLocalToGlobalMappingSetUseHashTable - Should global to local lookups use a hash table?

   This is memory-scalable, but slightly slower, implementation than the default flat array.

   Not collective

   Input parameters:
+  mapping - mapping between local and global numbering
-  flg - use a hash table?

   Level: advanced

.seealso: ISGlobalToLocalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingSetFromOptions(), ISLocalToGlobalMappingGetUseHashTable()

@*/
PetscErrorCode ISLocalToGlobalMappingSetUseHashTable(ISLocalToGlobalMapping mapping, PetscBool flg)
{
  PetscFunctionBegin;

  /* NOTE(review): only effective before the global-to-local data is first built;
     ISGlobalToLocalMappingSetUp_Private() returns early once globals/globalht exist */
  mapping->use_hash_table = flg;

  PetscFunctionReturn(0);
}

/*@
   ISLocalToGlobalMappingGetUseHashTable - Do global to local lookups use a hash table?

   This is memory-scalable, but slightly slower, implementation than the default flat array.

   Not collective

   Input parameter:
+  mapping - mapping between local and global numbering

   Output parameter:
+  flg - does the mapping use a hash table?

   Level: advanced

.seealso: ISGlobalToLocalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingSetFromOptions(), ISLocalToGlobalMappingSetUseHashTable()
@*/
PetscErrorCode ISLocalToGlobalMappingGetUseHashTable(ISLocalToGlobalMapping mapping, PetscBool *flg)
{
  PetscFunctionBegin;

  *flg = mapping->use_hash_table;

  PetscFunctionReturn(0);
}

/*@
   ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
   specified with a global numbering.

   Not collective

   Input Parameters:
+  mapping - mapping between local and global numbering
.  type - IS_GTOLM_MASK - replaces global indices with no local value with -1
          IS_GTOLM_DROP - drops the indices with no local value from the output list
.  n - number of global indices to map
-  idx - global indices to map

   Output Parameters:
+  nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-  idxout - local index of each global index, one must pass in an array long enough
           to hold all the indices. You can call ISGlobalToLocalMappingApply() with
           idxout == NULL to determine the required length (returned in nout)
           and then allocate the required space and call ISGlobalToLocalMappingApply()
           a second time to set the values.

   Notes:
   Either nout or idxout may be NULL. idx and idxout may be identical.

   Unless the option -ltogm_use_hash_table is used, this is not scalable in memory usage. Each processor requires
   O(Nglobal) size array to compute these.

   Level: advanced

   Developer Note: The manual page states that idx and idxout may be identical but the calling
   sequence declares idx as const so it cannot be the same as idxout.
700 701 Concepts: mapping^global to local 702 703 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(), 704 ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingGetUseHashTable(), ISLocalToGlobalMappingSetUseHashTable() 705 @*/ 706 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, 707 PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 708 { 709 PetscInt i,*globals,nf = 0,tmp,start,end,bs,local; 710 PetscBool use_hash_table; 711 PetscErrorCode ierr; 712 713 PetscFunctionBegin; 714 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 715 ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr); 716 ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&use_hash_table);CHKERRQ(ierr); 717 globals = mapping->globals; 718 start = mapping->globalstart; 719 end = mapping->globalend; 720 bs = mapping->bs; 721 722 #define GTOL(g, local) do { \ 723 if (use_hash_table) { \ 724 PetscHashIMap(mapping->globalht,g/bs,local); \ 725 } else { \ 726 local = globals[g/bs - start]; \ 727 } \ 728 local = bs*local + (g % bs); \ 729 } while (0) 730 731 if (type == IS_GTOLM_MASK) { 732 if (idxout) { 733 for (i=0; i<n; i++) { 734 if (idx[i] < 0) idxout[i] = idx[i]; 735 else if (idx[i] < bs*start) idxout[i] = -1; 736 else if (idx[i] > bs*(end+1)-1) idxout[i] = -1; 737 else GTOL(idx[i], local); 738 } 739 } 740 if (nout) *nout = n; 741 } else { 742 if (idxout) { 743 for (i=0; i<n; i++) { 744 if (idx[i] < 0) continue; 745 if (idx[i] < bs*start) continue; 746 if (idx[i] > bs*(end+1)-1) continue; 747 GTOL(idx[i], tmp); 748 if (tmp < 0) continue; 749 idxout[nf++] = tmp; 750 } 751 } else { 752 for (i=0; i<n; i++) { 753 if (idx[i] < 0) continue; 754 if (idx[i] < bs*start) continue; 755 if (idx[i] > bs*(end+1)-1) continue; 756 GTOL(idx[i], tmp); 757 if (tmp < 0) continue; 758 nf++; 759 } 760 } 761 if (nout) *nout = nf; 762 } 763 #undef GTOL 764 
PetscFunctionReturn(0); 765 } 766 767 /*@ 768 ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering 769 a new index set using the local numbering defined in an ISLocalToGlobalMapping 770 context. 771 772 Not collective 773 774 Input Parameters: 775 + mapping - mapping between local and global numbering 776 - is - index set in global numbering 777 778 Output Parameters: 779 . newis - index set in local numbering 780 781 Level: advanced 782 783 Concepts: mapping^local to global 784 785 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 786 ISLocalToGlobalMappingDestroy() 787 @*/ 788 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis) 789 { 790 PetscErrorCode ierr; 791 PetscInt n,nout,*idxout; 792 const PetscInt *idxin; 793 794 PetscFunctionBegin; 795 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 796 PetscValidHeaderSpecific(is,IS_CLASSID,3); 797 PetscValidPointer(newis,4); 798 799 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 800 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 801 if (type == IS_GTOLM_MASK) { 802 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 803 } else { 804 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr); 805 ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr); 806 } 807 ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr); 808 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 809 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 810 PetscFunctionReturn(0); 811 } 812 813 /*@ 814 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 815 specified with a block global numbering. 816 817 Not collective 818 819 Input Parameters: 820 + mapping - mapping between local and global numbering 821 . 
type - IS_GTOLM_MASK - replaces global indices with no local value with -1 822 IS_GTOLM_DROP - drops the indices with no local value from the output list 823 . n - number of global indices to map 824 - idx - global indices to map 825 826 Output Parameters: 827 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 828 - idxout - local index of each global index, one must pass in an array long enough 829 to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with 830 idxout == NULL to determine the required length (returned in nout) 831 and then allocate the required space and call ISGlobalToLocalMappingApplyBlock() 832 a second time to set the values. 833 834 Notes: 835 Either nout or idxout may be NULL. idx and idxout may be identical. 836 837 Unless the option -ltogm_use_hash_table is used, this is not scalable in memory usage. Each processor requires 838 O(Nglobal) size array to compute these. 839 840 Level: advanced 841 842 Developer Note: The manual page states that idx and idxout may be identical but the calling 843 sequence declares idx as const so it cannot be the same as idxout. 
844 845 Concepts: mapping^global to local 846 847 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 848 ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingGetUseHashTable(), ISLocalToGlobalMappingSetUseHashTable() 849 @*/ 850 PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, 851 PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 852 { 853 PetscInt i,*globals,nf = 0,tmp,start,end; 854 PetscBool use_hash_table; 855 PetscErrorCode ierr; 856 857 PetscFunctionBegin; 858 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 859 ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr); 860 ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&use_hash_table);CHKERRQ(ierr); 861 globals = mapping->globals; 862 start = mapping->globalstart; 863 end = mapping->globalend; 864 865 #define GTOL(g, local) do { \ 866 if (use_hash_table) { \ 867 PetscHashIMap(mapping->globalht,g,local); \ 868 } else { \ 869 local = globals[g - start]; \ 870 } \ 871 } while (0) 872 873 if (type == IS_GTOLM_MASK) { 874 if (idxout) { 875 for (i=0; i<n; i++) { 876 if (idx[i] < 0) idxout[i] = idx[i]; 877 else if (idx[i] < start) idxout[i] = -1; 878 else if (idx[i] > end) idxout[i] = -1; 879 else GTOL(idx[i], idxout[i]); 880 } 881 } 882 if (nout) *nout = n; 883 } else { 884 if (idxout) { 885 for (i=0; i<n; i++) { 886 if (idx[i] < 0) continue; 887 if (idx[i] < start) continue; 888 if (idx[i] > end) continue; 889 GTOL(idx[i], tmp); 890 if (tmp < 0) continue; 891 idxout[nf++] = tmp; 892 } 893 } else { 894 for (i=0; i<n; i++) { 895 if (idx[i] < 0) continue; 896 if (idx[i] < start) continue; 897 if (idx[i] > end) continue; 898 GTOL(idx[i], tmp); 899 if (tmp < 0) continue; 900 nf++; 901 } 902 } 903 if (nout) *nout = nf; 904 } 905 #undef GTOL 906 PetscFunctionReturn(0); 907 } 908 909 /*@C 910 ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each 
processor and
    each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_cached) {
    /* the (collective) computation was already done once; hand out the cached arrays.
       NOTE(review): the caller receives pointers into the mapping's cache, so it must
       not free them directly -- use ISLocalToGlobalMappingRestoreBlockInfo() */
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   Computes, via a three-phase MPI rendezvous, which other ranks share each of this
   rank's local (block) indices:
     phase 1: each rank sends its (global index, local index) pairs to the rank that
              "globally owns" that index under a simple block partition of [0,Ng);
     phase 2: the owners count, for every owned index, how many ranks reported it and
              collect the reporting ranks (only indices with >1 reporter matter);
     phase 3: the owners send back, per reporter, the list of (local node, sharing
              ranks) records, from which each rank assembles its neighbor lists.
   The results are cached on the mapping (info_*) for reuse.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; still allocate one-entry arrays so the
       Restore routine can free them uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + max global index over all ranks */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* block partition of [0,Ng): rank r "globally owns" [r*scale, (r+1)*scale), last rank gets the remainder */
  scale = Ng/size + 1;
  ng    = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node; nprocs[2p] counts indices owned by rank p,
     nprocs[2p+1] is a 0/1 flag that a message to p is needed */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows; each incoming message holds up to nmax (global,local) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners, as (global index, local index) pairs */
  ierr = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced starts[]; rebuild it for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages; dest[] remembers the target rank of each send for phases 2-3 */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives; nownedsenders[i] will count how many ranks reported owned index rstart+i */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* number of (global,local) pairs */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill loop above advanced starts[]; recompute it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners (phase 3); starts[] again needs rebuilding */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths; the reply pattern is the reverse of phase 1 (dest[] holds the senders) */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  /* allocate the output arrays; bprocs maps a rank to its slot in the compressed lists */
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.
numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_free) {
    /* info_free is set by ISLocalToGlobalMappingGetInfo() when it expanded the
       block info for bs > 1: those arrays are owned by the caller, so free them
       here.  Otherwise the arrays belong to the mapping's cache and are kept. */
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt i;

      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (i=1; i<*nproc; i++) {
        ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* always null the caller's pointers so stale references cannot be used */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
    each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    /* each shared block index b expands to the bs point indices bs*b .. bs*b+bs-1 */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* tell the Restore routine these expanded arrays must be freed.
       NOTE(review): info_free appears never to be reset to PETSC_FALSE here;
       confirm a later RestoreBlockInfo of the cached arrays cannot free the
       mapping's cache by mistake */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* bs == 1: block info and point info coincide; hand out the cached arrays */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}

/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* point info and block info share the same ownership rules, so delegate */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
.
array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize() 1506 1507 Level: advanced 1508 1509 Notes: ISLocalToGlobalMappingGetSize() returns the length the this array 1510 1511 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices() 1512 @*/ 1513 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1514 { 1515 PetscFunctionBegin; 1516 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1517 PetscValidPointer(array,2); 1518 if (ltog->bs == 1) { 1519 *array = ltog->indices; 1520 } else { 1521 PetscInt *jj,k,i,j,n = ltog->n, bs = ltog->bs; 1522 const PetscInt *ii; 1523 PetscErrorCode ierr; 1524 1525 ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr); 1526 *array = jj; 1527 k = 0; 1528 ii = ltog->indices; 1529 for (i=0; i<n; i++) 1530 for (j=0; j<bs; j++) 1531 jj[k++] = bs*ii[i] + j; 1532 } 1533 PetscFunctionReturn(0); 1534 } 1535 1536 /*@C 1537 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices() 1538 1539 Not Collective 1540 1541 Input Arguments: 1542 + ltog - local to global mapping 1543 - array - array of indices 1544 1545 Level: advanced 1546 1547 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices() 1548 @*/ 1549 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1550 { 1551 PetscFunctionBegin; 1552 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1553 PetscValidPointer(array,2); 1554 if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer"); 1555 1556 if (ltog->bs > 1) { 1557 PetscErrorCode ierr; 1558 ierr = PetscFree(*(void**)array);CHKERRQ(ierr); 1559 } 1560 PetscFunctionReturn(0); 1561 } 1562 1563 /*@C 1564 
ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block 1565 1566 Not Collective 1567 1568 Input Arguments: 1569 . ltog - local to global mapping 1570 1571 Output Arguments: 1572 . array - array of indices 1573 1574 Level: advanced 1575 1576 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices() 1577 @*/ 1578 PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1579 { 1580 PetscFunctionBegin; 1581 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1582 PetscValidPointer(array,2); 1583 *array = ltog->indices; 1584 PetscFunctionReturn(0); 1585 } 1586 1587 /*@C 1588 ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices() 1589 1590 Not Collective 1591 1592 Input Arguments: 1593 + ltog - local to global mapping 1594 - array - array of indices 1595 1596 Level: advanced 1597 1598 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices() 1599 @*/ 1600 PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1601 { 1602 PetscFunctionBegin; 1603 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1604 PetscValidPointer(array,2); 1605 if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer"); 1606 *array = NULL; 1607 PetscFunctionReturn(0); 1608 } 1609 1610 /*@C 1611 ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings 1612 1613 Not Collective 1614 1615 Input Arguments: 1616 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate 1617 . n - number of mappings to concatenate 1618 - ltogs - local to global mappings 1619 1620 Output Arguments: 1621 . 
ltogcat - new mapping 1622 1623 Note: this currently always returns a mapping with block size of 1 1624 1625 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1626 1627 Level: advanced 1628 1629 .seealso: ISLocalToGlobalMappingCreate() 1630 @*/ 1631 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat) 1632 { 1633 PetscInt i,cnt,m,*idx; 1634 PetscErrorCode ierr; 1635 1636 PetscFunctionBegin; 1637 if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n); 1638 if (n > 0) PetscValidPointer(ltogs,3); 1639 for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3); 1640 PetscValidPointer(ltogcat,4); 1641 for (cnt=0,i=0; i<n; i++) { 1642 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1643 cnt += m; 1644 } 1645 ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr); 1646 for (cnt=0,i=0; i<n; i++) { 1647 const PetscInt *subidx; 1648 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1649 ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1650 ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr); 1651 ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1652 cnt += m; 1653 } 1654 ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr); 1655 PetscFunctionReturn(0); 1656 } 1657 1658 1659