1 2 #include <petsc-private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petscsf.h> 4 #include <petscviewer.h> 5 6 PetscClassId IS_LTOGM_CLASSID; 7 8 9 #undef __FUNCT__ 10 #define __FUNCT__ "ISG2LMapApply" 11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[]) 12 { 13 PetscErrorCode ierr; 14 PetscInt i,start,end; 15 16 PetscFunctionBegin; 17 if (!mapping->globals) { 18 ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr); 19 } 20 start = mapping->globalstart; 21 end = mapping->globalend; 22 for (i=0; i<n; i++) { 23 if (in[i] < 0) out[i] = in[i]; 24 else if (in[i] < start) out[i] = -1; 25 else if (in[i] > end) out[i] = -1; 26 else out[i] = mapping->globals[in[i] - start]; 27 } 28 PetscFunctionReturn(0); 29 } 30 31 32 #undef __FUNCT__ 33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize" 34 /*@ 35 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 36 37 Not Collective 38 39 Input Parameter: 40 . ltog - local to global mapping 41 42 Output Parameter: 43 . 
n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length 44 45 Level: advanced 46 47 Concepts: mapping^local to global 48 49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 50 @*/ 51 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n) 52 { 53 PetscFunctionBegin; 54 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 55 PetscValidIntPointer(n,2); 56 *n = mapping->bs*mapping->n; 57 PetscFunctionReturn(0); 58 } 59 60 #undef __FUNCT__ 61 #define __FUNCT__ "ISLocalToGlobalMappingView" 62 /*@C 63 ISLocalToGlobalMappingView - View a local to global mapping 64 65 Not Collective 66 67 Input Parameters: 68 + ltog - local to global mapping 69 - viewer - viewer 70 71 Level: advanced 72 73 Concepts: mapping^local to global 74 75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 76 @*/ 77 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer) 78 { 79 PetscInt i; 80 PetscMPIInt rank; 81 PetscBool iascii; 82 PetscErrorCode ierr; 83 84 PetscFunctionBegin; 85 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 86 if (!viewer) { 87 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr); 88 } 89 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 90 91 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr); 92 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 93 if (iascii) { 94 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr); 95 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 96 for (i=0; i<mapping->n; i++) { 97 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr); 98 } 99 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 100 ierr = 
PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 101 } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name); 102 PetscFunctionReturn(0); 103 } 104 105 #undef __FUNCT__ 106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS" 107 /*@ 108 ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n) 109 ordering and a global parallel ordering. 110 111 Not collective 112 113 Input Parameter: 114 . is - index set containing the global numbers for each local number 115 116 Output Parameter: 117 . mapping - new mapping data structure 118 119 Notes: the block size of the IS determines the block size of the mapping 120 Level: advanced 121 122 Concepts: mapping^local to global 123 124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate() 125 @*/ 126 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping) 127 { 128 PetscErrorCode ierr; 129 PetscInt n,bs; 130 const PetscInt *indices; 131 MPI_Comm comm; 132 PetscBool isblock; 133 134 PetscFunctionBegin; 135 PetscValidHeaderSpecific(is,IS_CLASSID,1); 136 PetscValidPointer(mapping,2); 137 138 ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr); 139 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 140 ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr); 141 if (!isblock) { 142 ierr = ISGetIndices(is,&indices);CHKERRQ(ierr); 143 ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 144 ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr); 145 } else { 146 ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr); 147 ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr); 148 ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr); 149 ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr); 150 } 151 PetscFunctionReturn(0); 152 } 153 154 #undef __FUNCT__ 155 #define 
__FUNCT__ "ISLocalToGlobalMappingCreateSF" 156 /*@C 157 ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) 158 ordering and a global parallel ordering. 159 160 Collective 161 162 Input Parameter: 163 + sf - star forest mapping contiguous local indices to (rank, offset) 164 - start - first global index on this process 165 166 Output Parameter: 167 . mapping - new mapping data structure 168 169 Level: advanced 170 171 Concepts: mapping^local to global 172 173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS() 174 @*/ 175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping) 176 { 177 PetscErrorCode ierr; 178 PetscInt i,maxlocal,nroots,nleaves,*globals,*ltog; 179 const PetscInt *ilocal; 180 MPI_Comm comm; 181 182 PetscFunctionBegin; 183 PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1); 184 PetscValidPointer(mapping,3); 185 186 ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr); 187 ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr); 188 if (ilocal) { 189 for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1); 190 } 191 else maxlocal = nleaves; 192 ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr); 193 ierr = PetscMalloc1(maxlocal,<og);CHKERRQ(ierr); 194 for (i=0; i<nroots; i++) globals[i] = start + i; 195 for (i=0; i<maxlocal; i++) ltog[i] = -1; 196 ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr); 197 ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr); 198 ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr); 199 ierr = PetscFree(globals);CHKERRQ(ierr); 200 PetscFunctionReturn(0); 201 } 202 203 #undef __FUNCT__ 204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize" 205 /*@ 206 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 207 ordering and a global parallel ordering. 
208 209 Not Collective 210 211 Input Parameters: 212 . mapping - mapping data structure 213 214 Output Parameter: 215 . bs - the blocksize 216 217 Level: advanced 218 219 Concepts: mapping^local to global 220 221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 222 @*/ 223 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs) 224 { 225 PetscFunctionBegin; 226 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 227 *bs = mapping->bs; 228 PetscFunctionReturn(0); 229 } 230 231 #undef __FUNCT__ 232 #define __FUNCT__ "ISLocalToGlobalMappingCreate" 233 /*@ 234 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 235 ordering and a global parallel ordering. 236 237 Not Collective, but communicator may have more than one process 238 239 Input Parameters: 240 + comm - MPI communicator 241 . bs - the block size 242 . n - the number of local elements divided by the block size, or equivalently the number of block indices 243 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 244 - mode - see PetscCopyMode 245 246 Output Parameter: 247 . 
mapping - new mapping data structure 248 249 Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 250 Level: advanced 251 252 Concepts: mapping^local to global 253 254 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS() 255 @*/ 256 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping) 257 { 258 PetscErrorCode ierr; 259 PetscInt *in; 260 261 PetscFunctionBegin; 262 if (n) PetscValidIntPointer(indices,3); 263 PetscValidPointer(mapping,4); 264 265 *mapping = NULL; 266 ierr = ISInitializePackage();CHKERRQ(ierr); 267 268 ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS", 269 cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr); 270 (*mapping)->n = n; 271 (*mapping)->bs = bs; 272 /* 273 Do not create the global to local mapping. This is only created if 274 ISGlobalToLocalMapping() is called 275 */ 276 (*mapping)->globals = 0; 277 if (mode == PETSC_COPY_VALUES) { 278 ierr = PetscMalloc1(n,&in);CHKERRQ(ierr); 279 ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr); 280 (*mapping)->indices = in; 281 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 282 } else if (mode == PETSC_OWN_POINTER) { 283 (*mapping)->indices = (PetscInt*)indices; 284 ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr); 285 } 286 else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER"); 287 PetscFunctionReturn(0); 288 } 289 290 #undef __FUNCT__ 291 #define __FUNCT__ "ISLocalToGlobalMappingDestroy" 292 /*@ 293 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 294 ordering and a global parallel ordering. 295 296 Note Collective 297 298 Input Parameters: 299 . 
mapping - mapping data structure 300 301 Level: advanced 302 303 .seealso: ISLocalToGlobalMappingCreate() 304 @*/ 305 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) 306 { 307 PetscErrorCode ierr; 308 309 PetscFunctionBegin; 310 if (!*mapping) PetscFunctionReturn(0); 311 PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1); 312 if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);} 313 ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr); 314 ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr); 315 ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr); 316 *mapping = 0; 317 PetscFunctionReturn(0); 318 } 319 320 #undef __FUNCT__ 321 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS" 322 /*@ 323 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 324 a new index set using the global numbering defined in an ISLocalToGlobalMapping 325 context. 326 327 Not collective 328 329 Input Parameters: 330 + mapping - mapping between local and global numbering 331 - is - index set in local numbering 332 333 Output Parameters: 334 . 
newis - index set in global numbering 335 336 Level: advanced 337 338 Concepts: mapping^local to global 339 340 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(), 341 ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply() 342 @*/ 343 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis) 344 { 345 PetscErrorCode ierr; 346 PetscInt n,*idxout; 347 const PetscInt *idxin; 348 349 PetscFunctionBegin; 350 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 351 PetscValidHeaderSpecific(is,IS_CLASSID,2); 352 PetscValidPointer(newis,3); 353 354 ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); 355 ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr); 356 ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr); 357 ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr); 358 ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr); 359 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr); 360 PetscFunctionReturn(0); 361 } 362 363 #undef __FUNCT__ 364 #define __FUNCT__ "ISLocalToGlobalMappingApply" 365 /*@ 366 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 367 and converts them to the global numbering. 368 369 Not collective 370 371 Input Parameters: 372 + mapping - the local to global mapping context 373 . N - number of integers 374 - in - input indices in local numbering 375 376 Output Parameter: 377 . out - indices in global numbering 378 379 Notes: 380 The in and out array parameters may be identical. 
381 382 Level: advanced 383 384 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 385 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 386 AOPetscToApplication(), ISGlobalToLocalMappingApply() 387 388 Concepts: mapping^local to global 389 @*/ 390 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 391 { 392 PetscInt i,bs,Nmax; 393 394 PetscFunctionBegin; 395 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 396 bs = mapping->bs; 397 Nmax = bs*mapping->n; 398 if (bs == 1) { 399 const PetscInt *idx = mapping->indices; 400 for (i=0; i<N; i++) { 401 if (in[i] < 0) { 402 out[i] = in[i]; 403 continue; 404 } 405 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 406 out[i] = idx[in[i]]; 407 } 408 } else { 409 const PetscInt *idx = mapping->indices; 410 for (i=0; i<N; i++) { 411 if (in[i] < 0) { 412 out[i] = in[i]; 413 continue; 414 } 415 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i); 416 out[i] = idx[in[i]/bs]*bs + (in[i] % bs); 417 } 418 } 419 PetscFunctionReturn(0); 420 } 421 422 #undef __FUNCT__ 423 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock" 424 /*@ 425 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 426 427 Not collective 428 429 Input Parameters: 430 + mapping - the local to global mapping context 431 . N - number of integers 432 - in - input indices in local block numbering 433 434 Output Parameter: 435 . out - indices in global block numbering 436 437 Notes: 438 The in and out array parameters may be identical. 
439 440 Example: 441 If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0 442 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 443 444 Level: advanced 445 446 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(), 447 ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(), 448 AOPetscToApplication(), ISGlobalToLocalMappingApply() 449 450 Concepts: mapping^local to global 451 @*/ 452 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[]) 453 { 454 455 PetscFunctionBegin; 456 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 457 { 458 PetscInt i,Nmax = mapping->n; 459 const PetscInt *idx = mapping->indices; 460 461 for (i=0; i<N; i++) { 462 if (in[i] < 0) { 463 out[i] = in[i]; 464 continue; 465 } 466 if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i); 467 out[i] = idx[in[i]]; 468 } 469 } 470 PetscFunctionReturn(0); 471 } 472 473 /* -----------------------------------------------------------------------------------------*/ 474 475 #undef __FUNCT__ 476 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private" 477 /* 478 Creates the global fields in the ISLocalToGlobalMapping structure 479 */ 480 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping) 481 { 482 PetscErrorCode ierr; 483 PetscInt i,*idx = mapping->indices,n = mapping->n,end,start,*globals; 484 485 PetscFunctionBegin; 486 end = 0; 487 start = PETSC_MAX_INT; 488 489 for (i=0; i<n; i++) { 490 if (idx[i] < 0) continue; 491 if (idx[i] < start) start = idx[i]; 492 if (idx[i] > end) end = idx[i]; 493 } 494 if (start > end) {start = 0; end = -1;} 495 mapping->globalstart = start; 496 mapping->globalend = end; 497 498 ierr = 
PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr); 499 mapping->globals = globals; 500 for (i=0; i<end-start+1; i++) globals[i] = -1; 501 for (i=0; i<n; i++) { 502 if (idx[i] < 0) continue; 503 globals[idx[i] - start] = i; 504 } 505 506 ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr); 507 PetscFunctionReturn(0); 508 } 509 510 #undef __FUNCT__ 511 #define __FUNCT__ "ISGlobalToLocalMappingApply" 512 /*@ 513 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 514 specified with a global numbering. 515 516 Not collective 517 518 Input Parameters: 519 + mapping - mapping between local and global numbering 520 . type - IS_GTOLM_MASK - replaces global indices with no local value with -1 521 IS_GTOLM_DROP - drops the indices with no local value from the output list 522 . n - number of global indices to map 523 - idx - global indices to map 524 525 Output Parameters: 526 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 527 - idxout - local index of each global index, one must pass in an array long enough 528 to hold all the indices. You can call ISGlobalToLocalMappingApply() with 529 idxout == NULL to determine the required length (returned in nout) 530 and then allocate the required space and call ISGlobalToLocalMappingApply() 531 a second time to set the values. 532 533 Notes: 534 Either nout or idxout may be NULL. idx and idxout may be identical. 535 536 This is not scalable in memory usage. Each processor requires O(Nglobal) size 537 array to compute these. 538 539 Level: advanced 540 541 Developer Note: The manual page states that idx and idxout may be identical but the calling 542 sequence declares idx as const so it cannot be the same as idxout. 
543 544 Concepts: mapping^global to local 545 546 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(), 547 ISLocalToGlobalMappingDestroy() 548 @*/ 549 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, 550 PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 551 { 552 PetscInt i,*globals,nf = 0,tmp,start,end,bs; 553 PetscErrorCode ierr; 554 555 PetscFunctionBegin; 556 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 557 if (!mapping->globals) { 558 ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr); 559 } 560 globals = mapping->globals; 561 start = mapping->globalstart; 562 end = mapping->globalend; 563 bs = mapping->bs; 564 565 if (type == IS_GTOLM_MASK) { 566 if (idxout) { 567 for (i=0; i<n; i++) { 568 if (idx[i] < 0) idxout[i] = idx[i]; 569 else if (idx[i] < bs*start) idxout[i] = -1; 570 else if (idx[i] > bs*end) idxout[i] = -1; 571 else idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs); 572 } 573 } 574 if (nout) *nout = n; 575 } else { 576 if (idxout) { 577 for (i=0; i<n; i++) { 578 if (idx[i] < 0) continue; 579 if (idx[i] < bs*start) continue; 580 if (idx[i] > bs*end) continue; 581 tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs); 582 if (tmp < 0) continue; 583 idxout[nf++] = tmp; 584 } 585 } else { 586 for (i=0; i<n; i++) { 587 if (idx[i] < 0) continue; 588 if (idx[i] < bs*start) continue; 589 if (idx[i] > bs*end) continue; 590 tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs); 591 if (tmp < 0) continue; 592 nf++; 593 } 594 } 595 if (nout) *nout = nf; 596 } 597 PetscFunctionReturn(0); 598 } 599 600 #undef __FUNCT__ 601 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock" 602 /*@ 603 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 604 specified with a block global numbering. 
605 606 Not collective 607 608 Input Parameters: 609 + mapping - mapping between local and global numbering 610 . type - IS_GTOLM_MASK - replaces global indices with no local value with -1 611 IS_GTOLM_DROP - drops the indices with no local value from the output list 612 . n - number of global indices to map 613 - idx - global indices to map 614 615 Output Parameters: 616 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 617 - idxout - local index of each global index, one must pass in an array long enough 618 to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with 619 idxout == NULL to determine the required length (returned in nout) 620 and then allocate the required space and call ISGlobalToLocalMappingApplyBlock() 621 a second time to set the values. 622 623 Notes: 624 Either nout or idxout may be NULL. idx and idxout may be identical. 625 626 This is not scalable in memory usage. Each processor requires O(Nglobal) size 627 array to compute these. 628 629 Level: advanced 630 631 Developer Note: The manual page states that idx and idxout may be identical but the calling 632 sequence declares idx as const so it cannot be the same as idxout. 
633 634 Concepts: mapping^global to local 635 636 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(), 637 ISLocalToGlobalMappingDestroy() 638 @*/ 639 PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, 640 PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[]) 641 { 642 PetscInt i,*globals,nf = 0,tmp,start,end; 643 PetscErrorCode ierr; 644 645 PetscFunctionBegin; 646 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 647 if (!mapping->globals) { 648 ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr); 649 } 650 globals = mapping->globals; 651 start = mapping->globalstart; 652 end = mapping->globalend; 653 654 if (type == IS_GTOLM_MASK) { 655 if (idxout) { 656 for (i=0; i<n; i++) { 657 if (idx[i] < 0) idxout[i] = idx[i]; 658 else if (idx[i] < start) idxout[i] = -1; 659 else if (idx[i] > end) idxout[i] = -1; 660 else idxout[i] = globals[idx[i] - start]; 661 } 662 } 663 if (nout) *nout = n; 664 } else { 665 if (idxout) { 666 for (i=0; i<n; i++) { 667 if (idx[i] < 0) continue; 668 if (idx[i] < start) continue; 669 if (idx[i] > end) continue; 670 tmp = globals[idx[i] - start]; 671 if (tmp < 0) continue; 672 idxout[nf++] = tmp; 673 } 674 } else { 675 for (i=0; i<n; i++) { 676 if (idx[i] < 0) continue; 677 if (idx[i] < start) continue; 678 if (idx[i] > end) continue; 679 tmp = globals[idx[i] - start]; 680 if (tmp < 0) continue; 681 nf++; 682 } 683 } 684 if (nout) *nout = nf; 685 } 686 PetscFunctionReturn(0); 687 } 688 689 #undef __FUNCT__ 690 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo" 691 /*@C 692 ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and 693 each index shared by more than one processor 694 695 Collective on ISLocalToGlobalMapping 696 697 Input Parameters: 698 . 
mapping - the mapping from local to global indexing 699 700 Output Parameter: 701 + nproc - number of processors that are connected to this one 702 . proc - neighboring processors 703 . numproc - number of indices for each subdomain (processor) 704 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 705 706 Level: advanced 707 708 Concepts: mapping^local to global 709 710 Fortran Usage: 711 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 712 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 713 PetscInt indices[nproc][numprocmax],ierr) 714 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 715 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 716 717 718 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(), 719 ISLocalToGlobalMappingRestoreInfo() 720 @*/ 721 PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[]) 722 { 723 PetscErrorCode ierr; 724 PetscMPIInt size,rank,tag1,tag2,tag3,*len,*source,imdex; 725 PetscInt i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices; 726 PetscInt *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc; 727 PetscInt cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned; 728 PetscInt node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp; 729 PetscInt first_procs,first_numprocs,*first_indices; 730 MPI_Request *recv_waits,*send_waits; 731 MPI_Status recv_status,*send_status,*recv_statuses; 732 MPI_Comm comm; 733 PetscBool debug = PETSC_FALSE; 734 735 PetscFunctionBegin; 736 PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1); 737 
ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr); 738 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 739 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 740 if (size == 1) { 741 *nproc = 0; 742 *procs = NULL; 743 ierr = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr); 744 (*numprocs)[0] = 0; 745 ierr = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr); 746 (*indices)[0] = NULL; 747 PetscFunctionReturn(0); 748 } 749 750 ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr); 751 752 /* 753 Notes on ISLocalToGlobalMappingGetBlockInfo 754 755 globally owned node - the nodes that have been assigned to this processor in global 756 numbering, just for this routine. 757 758 nontrivial globally owned node - node assigned to this processor that is on a subdomain 759 boundary (i.e. is has more than one local owner) 760 761 locally owned node - node that exists on this processors subdomain 762 763 nontrivial locally owned node - node that is not in the interior (i.e. 
has more than one 764 local subdomain 765 */ 766 ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr); 767 ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr); 768 ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr); 769 770 for (i=0; i<n; i++) { 771 if (lindices[i] > max) max = lindices[i]; 772 } 773 ierr = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr); 774 Ng++; 775 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 776 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 777 scale = Ng/size + 1; 778 ng = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng); 779 rstart = scale*rank; 780 781 /* determine ownership ranges of global indices */ 782 ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr); 783 ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 784 785 /* determine owners of each local node */ 786 ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr); 787 for (i=0; i<n; i++) { 788 proc = lindices[i]/scale; /* processor that globally owns this index */ 789 nprocs[2*proc+1] = 1; /* processor globally owns at least one of ours */ 790 owner[i] = proc; 791 nprocs[2*proc]++; /* count of how many that processor globally owns of ours */ 792 } 793 nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1]; 794 ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr); 795 796 /* inform other processors of number of messages and max length*/ 797 ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 798 ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr); 799 800 /* post receives for owned rows */ 801 ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr); 802 ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr); 803 for (i=0; i<nrecvs; i++) { 804 ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr); 805 } 806 807 /* pack messages 
containing lists of local nodes to owners */ 808 ierr = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr); 809 ierr = PetscMalloc1((size+1),&starts);CHKERRQ(ierr); 810 starts[0] = 0; 811 for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2]; 812 for (i=0; i<n; i++) { 813 sends[starts[owner[i]]++] = lindices[i]; 814 sends[starts[owner[i]]++] = i; 815 } 816 ierr = PetscFree(owner);CHKERRQ(ierr); 817 starts[0] = 0; 818 for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2]; 819 820 /* send the messages */ 821 ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr); 822 ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr); 823 cnt = 0; 824 for (i=0; i<size; i++) { 825 if (nprocs[2*i]) { 826 ierr = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr); 827 dest[cnt] = i; 828 cnt++; 829 } 830 } 831 ierr = PetscFree(starts);CHKERRQ(ierr); 832 833 /* wait on receives */ 834 ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr); 835 ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr); 836 cnt = nrecvs; 837 ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr); 838 ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr); 839 while (cnt) { 840 ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 841 /* unpack receives into our local space */ 842 ierr = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr); 843 source[imdex] = recv_status.MPI_SOURCE; 844 len[imdex] = len[imdex]/2; 845 /* count how many local owners for each of my global owned indices */ 846 for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++; 847 cnt--; 848 } 849 ierr = PetscFree(recv_waits);CHKERRQ(ierr); 850 851 /* count how many globally owned indices are on an edge multiplied by how many processors own them. 
*/ 852 nowned = 0; 853 nownedm = 0; 854 for (i=0; i<ng; i++) { 855 if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;} 856 } 857 858 /* create single array to contain rank of all local owners of each globally owned index */ 859 ierr = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr); 860 ierr = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr); 861 starts[0] = 0; 862 for (i=1; i<ng; i++) { 863 if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1]; 864 else starts[i] = starts[i-1]; 865 } 866 867 /* for each nontrival globally owned node list all arriving processors */ 868 for (i=0; i<nrecvs; i++) { 869 for (j=0; j<len[i]; j++) { 870 node = recvs[2*i*nmax+2*j]-rstart; 871 if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i]; 872 } 873 } 874 875 if (debug) { /* ----------------------------------- */ 876 starts[0] = 0; 877 for (i=1; i<ng; i++) { 878 if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1]; 879 else starts[i] = starts[i-1]; 880 } 881 for (i=0; i<ng; i++) { 882 if (nownedsenders[i] > 1) { 883 ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr); 884 for (j=0; j<nownedsenders[i]; j++) { 885 ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr); 886 } 887 ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr); 888 } 889 } 890 ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr); 891 } /* ----------------------------------- */ 892 893 /* wait on original sends */ 894 if (nsends) { 895 ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr); 896 ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 897 ierr = PetscFree(send_status);CHKERRQ(ierr); 898 } 899 ierr = PetscFree(send_waits);CHKERRQ(ierr); 900 ierr = PetscFree(sends);CHKERRQ(ierr); 901 ierr = PetscFree(nprocs);CHKERRQ(ierr); 902 903 /* pack messages to send back to local owners */ 904 starts[0] = 0; 905 for (i=1; i<ng; i++) { 
906 if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1]; 907 else starts[i] = starts[i-1]; 908 } 909 nsends2 = nrecvs; 910 ierr = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */ 911 for (i=0; i<nrecvs; i++) { 912 nprocs[i] = 1; 913 for (j=0; j<len[i]; j++) { 914 node = recvs[2*i*nmax+2*j]-rstart; 915 if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node]; 916 } 917 } 918 nt = 0; 919 for (i=0; i<nsends2; i++) nt += nprocs[i]; 920 921 ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr); 922 ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr); 923 924 starts2[0] = 0; 925 for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1]; 926 /* 927 Each message is 1 + nprocs[i] long, and consists of 928 (0) the number of nodes being sent back 929 (1) the local node number, 930 (2) the number of processors sharing it, 931 (3) the processors sharing it 932 */ 933 for (i=0; i<nsends2; i++) { 934 cnt = 1; 935 sends2[starts2[i]] = 0; 936 for (j=0; j<len[i]; j++) { 937 node = recvs[2*i*nmax+2*j]-rstart; 938 if (nownedsenders[node] > 1) { 939 sends2[starts2[i]]++; 940 sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1]; 941 sends2[starts2[i]+cnt++] = nownedsenders[node]; 942 ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr); 943 cnt += nownedsenders[node]; 944 } 945 } 946 } 947 948 /* receive the message lengths */ 949 nrecvs2 = nsends; 950 ierr = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr); 951 ierr = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr); 952 ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr); 953 for (i=0; i<nrecvs2; i++) { 954 ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr); 955 } 956 957 /* send the message lengths */ 958 for (i=0; i<nsends2; i++) { 959 ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr); 960 } 961 962 /* wait on receives of lens */ 963 if 
(nrecvs2) { 964 ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr); 965 ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr); 966 ierr = PetscFree(recv_statuses);CHKERRQ(ierr); 967 } 968 ierr = PetscFree(recv_waits);CHKERRQ(ierr); 969 970 starts3[0] = 0; 971 nt = 0; 972 for (i=0; i<nrecvs2-1; i++) { 973 starts3[i+1] = starts3[i] + lens2[i]; 974 nt += lens2[i]; 975 } 976 if (nrecvs2) nt += lens2[nrecvs2-1]; 977 978 ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr); 979 ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr); 980 for (i=0; i<nrecvs2; i++) { 981 ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr); 982 } 983 984 /* send the messages */ 985 ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr); 986 for (i=0; i<nsends2; i++) { 987 ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr); 988 } 989 990 /* wait on receives */ 991 if (nrecvs2) { 992 ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr); 993 ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr); 994 ierr = PetscFree(recv_statuses);CHKERRQ(ierr); 995 } 996 ierr = PetscFree(recv_waits);CHKERRQ(ierr); 997 ierr = PetscFree(nprocs);CHKERRQ(ierr); 998 999 if (debug) { /* ----------------------------------- */ 1000 cnt = 0; 1001 for (i=0; i<nrecvs2; i++) { 1002 nt = recvs2[cnt++]; 1003 for (j=0; j<nt; j++) { 1004 ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr); 1005 for (k=0; k<recvs2[cnt+1]; k++) { 1006 ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr); 1007 } 1008 cnt += 2 + recvs2[cnt+1]; 1009 ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr); 1010 } 1011 } 1012 ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr); 1013 } /* ----------------------------------- */ 1014 1015 /* count number subdomains for each local node */ 1016 ierr = 
PetscMalloc1(size,&nprocs);CHKERRQ(ierr); 1017 ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr); 1018 cnt = 0; 1019 for (i=0; i<nrecvs2; i++) { 1020 nt = recvs2[cnt++]; 1021 for (j=0; j<nt; j++) { 1022 for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++; 1023 cnt += 2 + recvs2[cnt+1]; 1024 } 1025 } 1026 nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0); 1027 *nproc = nt; 1028 ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr); 1029 ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr); 1030 ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr); 1031 for (i=0;i<nt+1;i++) (*indices)[i]=NULL; 1032 ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr); 1033 cnt = 0; 1034 for (i=0; i<size; i++) { 1035 if (nprocs[i] > 0) { 1036 bprocs[i] = cnt; 1037 (*procs)[cnt] = i; 1038 (*numprocs)[cnt] = nprocs[i]; 1039 ierr = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr); 1040 cnt++; 1041 } 1042 } 1043 1044 /* make the list of subdomains for each nontrivial local node */ 1045 ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr); 1046 cnt = 0; 1047 for (i=0; i<nrecvs2; i++) { 1048 nt = recvs2[cnt++]; 1049 for (j=0; j<nt; j++) { 1050 for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt]; 1051 cnt += 2 + recvs2[cnt+1]; 1052 } 1053 } 1054 ierr = PetscFree(bprocs);CHKERRQ(ierr); 1055 ierr = PetscFree(recvs2);CHKERRQ(ierr); 1056 1057 /* sort the node indexing by their global numbers */ 1058 nt = *nproc; 1059 for (i=0; i<nt; i++) { 1060 ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr); 1061 for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]]; 1062 ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr); 1063 ierr = PetscFree(tmp);CHKERRQ(ierr); 1064 } 1065 1066 if (debug) { /* ----------------------------------- */ 1067 nt = *nproc; 1068 for (i=0; i<nt; i++) { 1069 ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: 
",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr); 1070 for (j=0; j<(*numprocs)[i]; j++) { 1071 ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr); 1072 } 1073 ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr); 1074 } 1075 ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr); 1076 } /* ----------------------------------- */ 1077 1078 /* wait on sends */ 1079 if (nsends2) { 1080 ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr); 1081 ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr); 1082 ierr = PetscFree(send_status);CHKERRQ(ierr); 1083 } 1084 1085 ierr = PetscFree(starts3);CHKERRQ(ierr); 1086 ierr = PetscFree(dest);CHKERRQ(ierr); 1087 ierr = PetscFree(send_waits);CHKERRQ(ierr); 1088 1089 ierr = PetscFree(nownedsenders);CHKERRQ(ierr); 1090 ierr = PetscFree(ownedsenders);CHKERRQ(ierr); 1091 ierr = PetscFree(starts);CHKERRQ(ierr); 1092 ierr = PetscFree(starts2);CHKERRQ(ierr); 1093 ierr = PetscFree(lens2);CHKERRQ(ierr); 1094 1095 ierr = PetscFree(source);CHKERRQ(ierr); 1096 ierr = PetscFree(len);CHKERRQ(ierr); 1097 ierr = PetscFree(recvs);CHKERRQ(ierr); 1098 ierr = PetscFree(nprocs);CHKERRQ(ierr); 1099 ierr = PetscFree(sends2);CHKERRQ(ierr); 1100 1101 /* put the information about myself as the first entry in the list */ 1102 first_procs = (*procs)[0]; 1103 first_numprocs = (*numprocs)[0]; 1104 first_indices = (*indices)[0]; 1105 for (i=0; i<*nproc; i++) { 1106 if ((*procs)[i] == rank) { 1107 (*procs)[0] = (*procs)[i]; 1108 (*numprocs)[0] = (*numprocs)[i]; 1109 (*indices)[0] = (*indices)[i]; 1110 (*procs)[i] = first_procs; 1111 (*numprocs)[i] = first_numprocs; 1112 (*indices)[i] = first_indices; 1113 break; 1114 } 1115 } 1116 PetscFunctionReturn(0); 1117 } 1118 1119 #undef __FUNCT__ 1120 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo" 1121 /*@C 1122 ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo() 1123 1124 Collective on 
ISLocalToGlobalMapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each processor
-  indices - indices of local nodes shared with neighbor (sorted by global numbering)

   Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = PetscFree(*procs);CHKERRQ(ierr);
  ierr = PetscFree(*numprocs);CHKERRQ(ierr);
  if (*indices) {
    /* every (*indices)[i] was allocated individually by the Get routine, so each
       entry is freed on its own before the array of pointers itself is freed */
    ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
    for (i=1; i<*nproc; i++) {
      ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(*indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each subdomain (processor)
-  indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

   Level: advanced

   Concepts: mapping^local to global

   Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* get the block-level neighbor info, then expand each block index into
     bs consecutive point indices */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);CHKERRQ(ierr);
  ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
  for (i=0; i<*nproc; i++) {
    ierr = PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) {
      for (k=0; k<bs; k++) {
        (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
      }
    }
    /* counts are per-point from here on, not per-block */
    (*numprocs)[i] *= bs;
  }
  if (bindices) {
    /* free the block-level index lists; same layout as in ISLocalToGlobalMappingRestoreBlockInfo() */
    ierr = PetscFree(bindices[0]);CHKERRQ(ierr);
    for (i=1; i<*nproc; i++) {
      ierr = PetscFree(bindices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(bindices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
/*@C
   ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

   Collective on ISLocalToGlobalMapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each processor
-  indices - indices of local nodes shared with neighbor (sorted by global numbering)

   Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the point-wise arrays from GetInfo() use the same per-subdomain allocation
     layout as the block arrays, so the block restore routine frees them all */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Arguments:
.  ltog - local to global mapping

   Output Arguments:
.  array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()

   Level: advanced

   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  if (ltog->bs == 1) {
    /* block size one: the stored block indices already are the point indices,
       hand out a borrowed reference (nothing to free on restore) */
    *array = ltog->indices;
  } else {
    /* expand each block index into bs consecutive point indices; this allocates,
       and ISLocalToGlobalMappingRestoreIndices() frees the expansion */
    PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
    const PetscInt *ii;
    PetscErrorCode ierr;

    ierr   = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
    *array = jj;
    k      = 0;
    ii     = ltog->indices;
    for (i=0; i<n; i++)
      for (j=0; j<bs; j++)
        jj[k++] = bs*ii[i] + j;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
/*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

   Not Collective

   Input Arguments:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

  if (ltog->bs > 1) {
    /* the bs > 1 case handed out a freshly allocated expansion; release it */
    PetscErrorCode ierr;
    ierr = 
PetscFree(*(void**)array);CHKERRQ(ierr); 1316 } 1317 PetscFunctionReturn(0); 1318 } 1319 1320 #undef __FUNCT__ 1321 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices" 1322 /*@C 1323 ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block 1324 1325 Not Collective 1326 1327 Input Arguments: 1328 . ltog - local to global mapping 1329 1330 Output Arguments: 1331 . array - array of indices 1332 1333 Level: advanced 1334 1335 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices() 1336 @*/ 1337 PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1338 { 1339 PetscFunctionBegin; 1340 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1341 PetscValidPointer(array,2); 1342 *array = ltog->indices; 1343 PetscFunctionReturn(0); 1344 } 1345 1346 #undef __FUNCT__ 1347 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices" 1348 /*@C 1349 ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices() 1350 1351 Not Collective 1352 1353 Input Arguments: 1354 + ltog - local to global mapping 1355 - array - array of indices 1356 1357 Level: advanced 1358 1359 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices() 1360 @*/ 1361 PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array) 1362 { 1363 PetscFunctionBegin; 1364 PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1); 1365 PetscValidPointer(array,2); 1366 if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer"); 1367 *array = NULL; 1368 PetscFunctionReturn(0); 1369 } 1370 1371 #undef __FUNCT__ 1372 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate" 1373 /*@C 1374 ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings 1375 1376 Not 
Collective 1377 1378 Input Arguments: 1379 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate 1380 . n - number of mappings to concatenate 1381 - ltogs - local to global mappings 1382 1383 Output Arguments: 1384 . ltogcat - new mapping 1385 1386 Note: this currently always returns a mapping with block size of 1 1387 1388 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1389 1390 Level: advanced 1391 1392 .seealso: ISLocalToGlobalMappingCreate() 1393 @*/ 1394 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat) 1395 { 1396 PetscInt i,cnt,m,*idx; 1397 PetscErrorCode ierr; 1398 1399 PetscFunctionBegin; 1400 if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n); 1401 if (n > 0) PetscValidPointer(ltogs,3); 1402 for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3); 1403 PetscValidPointer(ltogcat,4); 1404 for (cnt=0,i=0; i<n; i++) { 1405 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1406 cnt += m; 1407 } 1408 ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr); 1409 for (cnt=0,i=0; i<n; i++) { 1410 const PetscInt *subidx; 1411 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1412 ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1413 ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr); 1414 ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1415 cnt += m; 1416 } 1417 ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr); 1418 PetscFunctionReturn(0); 1419 } 1420 1421 1422