#include <petsc-private/sfimpl.h> /*I "petscsf.h" I*/
#include <petscctable.h>

/* Logging support */
PetscLogEvent PETSCSF_SetGraph, PETSCSF_BcastBegin, PETSCSF_BcastEnd, PETSCSF_ReduceBegin, PETSCSF_ReduceEnd, PETSCSF_FetchAndOpBegin, PETSCSF_FetchAndOpEnd;

#if defined(PETSC_USE_DEBUG)
#  define PetscSFCheckGraphSet(sf,arg) do {                          \
    if (PetscUnlikely(!(sf)->graphset))                              \
      SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetGraph() on argument %D \"%s\" before %s()",(arg),#sf,PETSC_FUNCTION_NAME); \
  } while (0)
#else
#  define PetscSFCheckGraphSet(sf,arg) do {} while (0)
#endif

const char *const PetscSFDuplicateOptions[] = {"CONFONLY","RANKS","GRAPH","PetscSFDuplicateOption","PETSCSF_DUPLICATE_",0};

#undef __FUNCT__
#define __FUNCT__ "PetscSFCreate"
/*@C
   PetscSFCreate - create a star forest communication context

   Collective

   Input Arguments:
.  comm - communicator on which the star forest will operate

   Output Arguments:
.  sf - new star forest context

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFDestroy()
@*/
PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF *sf)
{
  PetscErrorCode ierr;
  PetscSF        b;

  PetscFunctionBegin;
  PetscValidPointer(sf,2);
  ierr = PetscSFInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(b,_p_PetscSF,struct _PetscSFOps,PETSCSF_CLASSID,"PetscSF","Star Forest","PetscSF",comm,PetscSFDestroy,PetscSFView);CHKERRQ(ierr);

  b->nroots    = -1;
  b->nleaves   = -1;
  b->nranks    = -1;
  b->rankorder = PETSC_TRUE;
  b->ingroup   = MPI_GROUP_NULL;
  b->outgroup  = MPI_GROUP_NULL;
  b->graphset  = PETSC_FALSE;

  *sf = b;
  PetscFunctionReturn(0);
}
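
/*
   Usage sketch (illustrative only; nroots, nleaves, and iremote are assumed
   from the surrounding code): the typical lifecycle of a PetscSF.

     PetscSF sf;
     ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
     ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
     ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
*/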

#undef __FUNCT__
#define __FUNCT__ "PetscSFReset"
/*@C
   PetscSFReset - Reset a star forest so that different sizes or neighbors can be used

   Collective

   Input Arguments:
.  sf - star forest

   Level: advanced

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFDestroy()
@*/
PetscErrorCode PetscSFReset(PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  sf->mine   = NULL;
  ierr       = PetscFree(sf->mine_alloc);CHKERRQ(ierr);
  sf->remote = NULL;
  ierr       = PetscFree(sf->remote_alloc);CHKERRQ(ierr);
  ierr       = PetscFree4(sf->ranks,sf->roffset,sf->rmine,sf->rremote);CHKERRQ(ierr);
  ierr       = PetscFree(sf->degree);CHKERRQ(ierr);
  if (sf->ingroup  != MPI_GROUP_NULL) {ierr = MPI_Group_free(&sf->ingroup);CHKERRQ(ierr);}
  if (sf->outgroup != MPI_GROUP_NULL) {ierr = MPI_Group_free(&sf->outgroup);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sf->multi);CHKERRQ(ierr);
  sf->graphset = PETSC_FALSE;
  if (sf->ops->Reset) {ierr = (*sf->ops->Reset)(sf);CHKERRQ(ierr);}
  sf->setupcalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetType"
/*@C
   PetscSFSetType - set the PetscSF communication implementation

   Collective on PetscSF

   Input Parameters:
+  sf - the PetscSF context
-  type - a known method

   Options Database Key:
.  -sf_type <type> - Sets the method; use -help for a list of available methods (for instance, window or basic)

   Notes:
   See "include/petscsf.h" for available methods, for instance
+    PETSCSFWINDOW - MPI-2/3 one-sided
-    PETSCSFBASIC  - basic implementation using MPI-1 two-sided

   Level: intermediate

.keywords: PetscSF, set, type

.seealso: PetscSFType, PetscSFCreate()
@*/
PetscErrorCode PetscSFSetType(PetscSF sf,PetscSFType type)
{
  PetscErrorCode ierr,(*r)(PetscSF);
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidCharPointer(type,2);

  ierr = PetscObjectTypeCompare((PetscObject)sf,type,&match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  ierr = PetscFunctionListFind(PetscSFList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested PetscSF type %s",type);
  /* Destroy the previous private PetscSF context */
  if (sf->ops->Destroy) {
    ierr = (*(sf)->ops->Destroy)(sf);CHKERRQ(ierr);
  }
  ierr = PetscMemzero(sf->ops,sizeof(*sf->ops));CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)sf,type);CHKERRQ(ierr);
  ierr = (*r)(sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
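
/*
   Usage sketch (illustrative only): select the one-sided implementation in
   code; equivalently, pass -sf_type window on the command line.

     ierr = PetscSFSetType(sf,PETSCSFWINDOW);CHKERRQ(ierr);
*/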

#undef __FUNCT__
#define __FUNCT__ "PetscSFDestroy"
/*@C
   PetscSFDestroy - destroy star forest

   Collective

   Input Arguments:
.  sf - address of star forest

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFReset()
@*/
PetscErrorCode PetscSFDestroy(PetscSF *sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*sf) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*sf),PETSCSF_CLASSID,1);
  if (--((PetscObject)(*sf))->refct > 0) {*sf = 0; PetscFunctionReturn(0);}
  ierr = PetscSFReset(*sf);CHKERRQ(ierr);
  if ((*sf)->ops->Destroy) {ierr = (*(*sf)->ops->Destroy)(*sf);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetUp"
/*@
   PetscSFSetUp - set up communication structures

   Collective

   Input Arguments:
.  sf - star forest communication object

   Level: beginner

.seealso: PetscSFSetFromOptions(), PetscSFSetType()
@*/
PetscErrorCode PetscSFSetUp(PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (sf->setupcalled) PetscFunctionReturn(0);
  if (!((PetscObject)sf)->type_name) {ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);}
  if (sf->ops->SetUp) {ierr = (*sf->ops->SetUp)(sf);CHKERRQ(ierr);}
  sf->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetFromOptions"
/*@C
   PetscSFSetFromOptions - set PetscSF options using the options database

   Logically Collective

   Input Arguments:
.  sf - star forest

   Options Database Keys:
+  -sf_type - implementation type, see PetscSFSetType()
-  -sf_rank_order - sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise

   Level: intermediate

.keywords: PetscSF, set, from, options, database

.seealso: PetscSFWindowSetSyncType()
@*/
PetscErrorCode PetscSFSetFromOptions(PetscSF sf)
{
  PetscSFType    deft;
  char           type[256];
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)sf);CHKERRQ(ierr);
  deft = ((PetscObject)sf)->type_name ? ((PetscObject)sf)->type_name : PETSCSFBASIC;
  ierr = PetscOptionsFList("-sf_type","PetscSF implementation type","PetscSFSetType",PetscSFList,deft,type,256,&flg);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,flg ? type : deft);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-sf_rank_order","sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise","PetscSFSetRankOrder",sf->rankorder,&sf->rankorder,NULL);CHKERRQ(ierr);
  if (sf->ops->SetFromOptions) {ierr = (*sf->ops->SetFromOptions)(sf);CHKERRQ(ierr);}
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
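
/*
   Usage sketch (illustrative only): let the options database configure the
   SF, e.g. when the application is launched as
       ./app -sf_type basic -sf_rank_order 0

     ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
*/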

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetRankOrder"
/*@C
   PetscSFSetRankOrder - sort multi-points for gathers and scatters by rank order

   Logically Collective

   Input Arguments:
+  sf - star forest
-  flg - PETSC_TRUE to sort, PETSC_FALSE to skip sorting (lower setup cost, but non-deterministic)

   Level: advanced

.seealso: PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFSetRankOrder(PetscSF sf,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidLogicalCollectiveBool(sf,flg,2);
  if (sf->multi) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Rank ordering must be set before first call to PetscSFGatherBegin() or PetscSFScatterBegin()");
  sf->rankorder = flg;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetGraph"
/*@C
   PetscSFSetGraph - Set a parallel star forest

   Collective

   Input Arguments:
+  sf - star forest
.  nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
.  iremote - remote locations of root vertices for each leaf on the current process
-  remotemode - copy mode for iremote

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFView(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraph(PetscSF sf,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscSFNode *iremote,PetscCopyMode remotemode)
{
  PetscErrorCode     ierr;
  PetscTable         table;
  PetscTablePosition pos;
  PetscMPIInt        size;
  PetscInt           i,*rcount,*ranks;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscLogEventBegin(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);
  if (nleaves && ilocal) PetscValidIntPointer(ilocal,4);
  if (nleaves) PetscValidPointer(iremote,6);
  if (nroots  < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nroots %D, cannot be negative",nroots);
  if (nleaves < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nleaves %D, cannot be negative",nleaves);
  ierr = PetscSFReset(sf);CHKERRQ(ierr);
  sf->nroots  = nroots;
  sf->nleaves = nleaves;
  if (ilocal) {
    switch (localmode) {
    case PETSC_COPY_VALUES:
      ierr     = PetscMalloc1(nleaves,&sf->mine_alloc);CHKERRQ(ierr);
      sf->mine = sf->mine_alloc;
      ierr     = PetscMemcpy(sf->mine,ilocal,nleaves*sizeof(*sf->mine));CHKERRQ(ierr);
      break;
    case PETSC_OWN_POINTER:
      sf->mine_alloc = (PetscInt*)ilocal;
      sf->mine       = sf->mine_alloc;
      break;
    case PETSC_USE_POINTER:
      sf->mine = (PetscInt*)ilocal;
      break;
    default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown localmode");
    }
  }
  /* Leaf range: from the explicit locations if ilocal was given (for any copy mode), otherwise contiguous [0,nleaves-1] */
  if (ilocal && nleaves > 0) {
    sf->minleaf = PETSC_MAX_INT;
    sf->maxleaf = PETSC_MIN_INT;
    for (i=0; i<nleaves; i++) {
      sf->minleaf = PetscMin(sf->minleaf,ilocal[i]);
      sf->maxleaf = PetscMax(sf->maxleaf,ilocal[i]);
    }
  } else {
    sf->minleaf = 0;
    sf->maxleaf = nleaves - 1;
  }
  switch (remotemode) {
  case PETSC_COPY_VALUES:
    ierr       = PetscMalloc1(nleaves,&sf->remote_alloc);CHKERRQ(ierr);
    sf->remote = sf->remote_alloc;
    ierr       = PetscMemcpy(sf->remote,iremote,nleaves*sizeof(*sf->remote));CHKERRQ(ierr);
    break;
  case PETSC_OWN_POINTER:
    sf->remote_alloc = (PetscSFNode*)iremote;
    sf->remote       = sf->remote_alloc;
    break;
  case PETSC_USE_POINTER:
    sf->remote = (PetscSFNode*)iremote;
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown remotemode");
  }

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);CHKERRQ(ierr);
  ierr = PetscTableCreate(10,size,&table);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    /* Log 1-based rank */
    ierr = PetscTableAdd(table,iremote[i].rank+1,1,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(table,&sf->nranks);CHKERRQ(ierr);
  ierr = PetscMalloc4(sf->nranks,&sf->ranks,sf->nranks+1,&sf->roffset,nleaves,&sf->rmine,nleaves,&sf->rremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(sf->nranks,&rcount,sf->nranks,&ranks);CHKERRQ(ierr);
  ierr = PetscTableGetHeadPosition(table,&pos);CHKERRQ(ierr);
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscTableGetNext(table,&pos,&ranks[i],&rcount[i]);CHKERRQ(ierr);
    ranks[i]--;                 /* Convert back to 0-based */
  }
  ierr = PetscTableDestroy(&table);CHKERRQ(ierr);
  ierr = PetscSortIntWithArray(sf->nranks,ranks,rcount);CHKERRQ(ierr);
  sf->roffset[0] = 0;
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscMPIIntCast(ranks[i],sf->ranks+i);CHKERRQ(ierr);
    sf->roffset[i+1] = sf->roffset[i] + rcount[i];
    rcount[i]        = 0;
  }
  for (i=0; i<nleaves; i++) {
    PetscInt lo,hi,irank;
    /* Search for index of iremote[i].rank in sf->ranks */
    lo = 0; hi = sf->nranks;
    while (hi - lo > 1) {
      PetscInt mid = lo + (hi - lo)/2;
      if (iremote[i].rank < sf->ranks[mid]) hi = mid;
      else                                  lo = mid;
    }
    if (hi - lo == 1 && iremote[i].rank == sf->ranks[lo]) irank = lo;
    else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Could not find rank %D in array",iremote[i].rank);
    sf->rmine[sf->roffset[irank] + rcount[irank]]   = ilocal ? ilocal[i] : i;
    sf->rremote[sf->roffset[irank] + rcount[irank]] = iremote[i].index;
    rcount[irank]++;
  }
  ierr = PetscFree2(rcount,ranks);CHKERRQ(ierr);
#if !defined(PETSC_USE_64BIT_INDICES)
  if (nroots == PETSC_DETERMINE) {
    /* Jed, if you have a better way to do this, put it in */
    PetscInt *numRankLeaves, *leafOff, *leafIndices, *numRankRoots, *rootOff, *rootIndices, maxRoots = 0;

    /* All-to-all to determine the number of leaf indices referencing each process (could also be done with a scan and asynchronous messages) */
    ierr = PetscMalloc4(size,&numRankLeaves,size+1,&leafOff,size,&numRankRoots,size+1,&rootOff);CHKERRQ(ierr);
    ierr = PetscMemzero(numRankLeaves, size * sizeof(PetscInt));CHKERRQ(ierr);
    for (i = 0; i < nleaves; ++i) ++numRankLeaves[iremote[i].rank];
    ierr = MPI_Alltoall(numRankLeaves, 1, MPIU_INT, numRankRoots, 1, MPIU_INT, PetscObjectComm((PetscObject)sf));CHKERRQ(ierr);
    /* Could set nroots to this maximum */
    for (i = 0; i < size; ++i) maxRoots += numRankRoots[i];

    /* Gather all indices */
    ierr = PetscMalloc2(nleaves,&leafIndices,maxRoots,&rootIndices);CHKERRQ(ierr);
    leafOff[0] = 0;
    for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
    for (i = 0; i < nleaves; ++i) leafIndices[leafOff[iremote[i].rank]++] = iremote[i].index;
    leafOff[0] = 0;
    for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
    rootOff[0] = 0;
    for (i = 0; i < size; ++i) rootOff[i+1] = rootOff[i] + numRankRoots[i];
    ierr = MPI_Alltoallv(leafIndices, numRankLeaves, leafOff, MPIU_INT, rootIndices, numRankRoots, rootOff, MPIU_INT, PetscObjectComm((PetscObject)sf));CHKERRQ(ierr);
    /* Sort and reduce */
    ierr = PetscSortRemoveDupsInt(&maxRoots, rootIndices);CHKERRQ(ierr);
    ierr = PetscFree2(leafIndices,rootIndices);CHKERRQ(ierr);
    ierr = PetscFree4(numRankLeaves,leafOff,numRankRoots,rootOff);CHKERRQ(ierr);
    sf->nroots = maxRoots;
  }
#endif

  sf->graphset = PETSC_TRUE;
  ierr = PetscLogEventEnd(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
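
/*
   Usage sketch (illustrative only; rank is this process's rank in the
   communicator and the two-process layout is hypothetical): every leaf on
   rank 1 references the corresponding root on rank 0, with leaves stored
   contiguously (ilocal = NULL).

     PetscInt    nroots  = !rank ? 3 : 0,nleaves = rank ? 3 : 0,i;
     PetscSFNode *iremote;
     ierr = PetscMalloc1(nleaves,&iremote);CHKERRQ(ierr);
     for (i=0; i<nleaves; i++) {iremote[i].rank = 0; iremote[i].index = i;}
     ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_COPY_VALUES,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
*/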

#undef __FUNCT__
#define __FUNCT__ "PetscSFCreateInverseSF"
/*@C
   PetscSFCreateInverseSF - given a PetscSF in which all vertices have degree 1, creates the inverse map

   Collective

   Input Arguments:
.  sf - star forest to invert

   Output Arguments:
.  isf - inverse of sf

   Level: advanced

   Notes:
   All roots must have degree 1.

   The local space may be a permutation, but cannot be sparse.

.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFCreateInverseSF(PetscSF sf,PetscSF *isf)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,nroots,nleaves,maxlocal,count,*newilocal;
  const PetscInt *ilocal;
  PetscSFNode    *roots,*leaves;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,(ilocal ? ilocal[i] : i)+1);
  ierr = PetscMalloc2(nroots,&roots,nleaves,&leaves);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    leaves[i].rank  = rank;
    leaves[i].index = i;
  }
  for (i=0; i<nroots; i++) {
    roots[i].rank  = -1;
    roots[i].index = -1;
  }
  ierr = PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);

  /* Check whether our leaves are sparse */
  for (i=0,count=0; i<nroots; i++) if (roots[i].rank >= 0) count++;
  if (count == nroots) newilocal = NULL;
  else {                        /* Index for sparse leaves and compact "roots" array (which is to become our leaves). */
    ierr = PetscMalloc1(count,&newilocal);CHKERRQ(ierr);
    for (i=0,count=0; i<nroots; i++) {
      if (roots[i].rank >= 0) {
        newilocal[count]   = i;
        roots[count].rank  = roots[i].rank;
        roots[count].index = roots[i].index;
        count++;
      }
    }
  }

  ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,isf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*isf,maxlocal,count,newilocal,PETSC_OWN_POINTER,roots,PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(roots,leaves);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
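
/*
   Usage sketch (illustrative only; leafdata and rootdata refer to the
   original sf's leaf and root buffers): after inversion, roots and leaves
   swap, so a broadcast over isf moves data from the original leaves to the
   original roots.

     PetscSF isf;
     ierr = PetscSFCreateInverseSF(sf,&isf);CHKERRQ(ierr);
     ierr = PetscSFBcastBegin(isf,MPIU_INT,leafdata,rootdata);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(isf,MPIU_INT,leafdata,rootdata);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&isf);CHKERRQ(ierr);
*/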

#undef __FUNCT__
#define __FUNCT__ "PetscSFDuplicate"
/*@
   PetscSFDuplicate - duplicate a PetscSF, optionally preserving rank connectivity and graph

   Collective

   Input Arguments:
+  sf - communication object to duplicate
-  opt - PETSCSF_DUPLICATE_CONFONLY, PETSCSF_DUPLICATE_RANKS, or PETSCSF_DUPLICATE_GRAPH (see PetscSFDuplicateOption)

   Output Arguments:
.  newsf - new communication object

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetType(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFDuplicate(PetscSF sf,PetscSFDuplicateOption opt,PetscSF *newsf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)sf),newsf);CHKERRQ(ierr);
  ierr = PetscSFSetType(*newsf,((PetscObject)sf)->type_name);CHKERRQ(ierr);
  if (sf->ops->Duplicate) {ierr = (*sf->ops->Duplicate)(sf,opt,*newsf);CHKERRQ(ierr);}
  if (opt == PETSCSF_DUPLICATE_GRAPH) {
    PetscInt          nroots,nleaves;
    const PetscInt    *ilocal;
    const PetscSFNode *iremote;
    ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetGraph"
/*@C
   PetscSFGetGraph - Get the graph specifying a parallel star forest

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers
-  iremote - remote locations of root vertices for each leaf on the current process

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetGraph(PetscSF sf,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  /* We are not currently requiring that the graph is set, thus returning nroots=-1 if it has not been set */
  /* if (!sf->graphset) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Graph has not been set, must call PetscSFSetGraph()"); */
  if (nroots)  *nroots  = sf->nroots;
  if (nleaves) *nleaves = sf->nleaves;
  if (ilocal)  *ilocal  = sf->mine;
  if (iremote) *iremote = sf->remote;
  PetscFunctionReturn(0);
}
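
/*
   Usage sketch (illustrative only): query the graph, passing NULL for any
   output that is not needed; the returned arrays remain owned by the SF.

     PetscInt          nroots,nleaves;
     const PetscSFNode *iremote;
     ierr = PetscSFGetGraph(sf,&nroots,&nleaves,NULL,&iremote);CHKERRQ(ierr);
*/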

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetLeafRange"
/*@C
   PetscSFGetLeafRange - Get the active leaf ranges

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  minleaf - minimum active leaf on this process
-  maxleaf - maximum active leaf on this process

   Level: developer

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFGetLeafRange(PetscSF sf,PetscInt *minleaf,PetscInt *maxleaf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (minleaf) *minleaf = sf->minleaf;
  if (maxleaf) *maxleaf = sf->maxleaf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFView"
/*@C
   PetscSFView - view a star forest

   Collective

   Input Arguments:
+  sf - star forest
-  viewer - viewer to display graph, for example PETSC_VIEWER_STDOUT_WORLD

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFView(PetscSF sf,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (!viewer) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)sf),&viewer);CHKERRQ(ierr);}
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(sf,1,viewer,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    PetscMPIInt rank;
    PetscInt    i,j;

    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)sf,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    if (sf->ops->View) {ierr = (*sf->ops->View)(sf,viewer);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of roots=%D, leaves=%D, remote ranks=%D\n",rank,sf->nroots,sf->nleaves,sf->nranks);CHKERRQ(ierr);
    for (i=0; i<sf->nleaves; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- (%D,%D)\n",rank,sf->mine ? sf->mine[i] : i,sf->remote[i].rank,sf->remote[i].index);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Roots referenced by my leaves, by rank\n",rank);CHKERRQ(ierr);
      for (i=0; i<sf->nranks; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d: %D edges\n",rank,sf->ranks[i],sf->roffset[i+1]-sf->roffset[i]);CHKERRQ(ierr);
        for (j=sf->roffset[i]; j<sf->roffset[i+1]; j++) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- %D\n",rank,sf->rmine[j],sf->rremote[j]);CHKERRQ(ierr);
        }
      }
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
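
/*
   Usage sketch (illustrative only): print the graph to standard output;
   passing NULL as the viewer selects the communicator's default ASCII
   viewer, as the code above shows.

     ierr = PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/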

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetRanks"
/*@C
   PetscSFGetRanks - Get ranks and number of vertices referenced by leaves on this process

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nranks - number of ranks referenced by local part
.  ranks - array of ranks
.  roffset - offset in rmine/rremote for each rank (length nranks+1)
.  rmine - concatenated array holding local indices referencing each remote rank
-  rremote - concatenated array holding remote indices referenced for each remote rank

   Level: developer

.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (nranks)  *nranks  = sf->nranks;
  if (ranks)   *ranks   = sf->ranks;
  if (roffset) *roffset = sf->roffset;
  if (rmine)   *rmine   = sf->rmine;
  if (rremote) *rremote = sf->rremote;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetGroups"
/*@C
   PetscSFGetGroups - gets incoming and outgoing process groups

   Collective

   Input Argument:
.  sf - star forest

   Output Arguments:
+  incoming - group of origin processes for incoming edges (leaves that reference my roots)
-  outgoing - group of destination processes for outgoing edges (roots that I reference)

   Level: developer

.seealso: PetscSFGetWindow(), PetscSFRestoreWindow()
@*/
PetscErrorCode PetscSFGetGroups(PetscSF sf,MPI_Group *incoming,MPI_Group *outgoing)
{
  PetscErrorCode ierr;
  MPI_Group      group;

  PetscFunctionBegin;
  if (sf->ingroup == MPI_GROUP_NULL) {
    PetscInt       i;
    const PetscInt *indegree;
    PetscMPIInt    rank,*outranks,*inranks;
    PetscSFNode    *remote;
    PetscSF        bgcount;

    /* Compute the number of incoming ranks */
    ierr = PetscMalloc1(sf->nranks,&remote);CHKERRQ(ierr);
    for (i=0; i<sf->nranks; i++) {
      remote[i].rank  = sf->ranks[i];
      remote[i].index = 0;
    }
    ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,&bgcount);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(bgcount,1,sf->nranks,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeBegin(bgcount,&indegree);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeEnd(bgcount,&indegree);CHKERRQ(ierr);

    /* Enumerate the incoming ranks */
    ierr = PetscMalloc2(indegree[0],&inranks,sf->nranks,&outranks);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
    for (i=0; i<sf->nranks; i++) outranks[i] = rank;
    ierr = PetscSFGatherBegin(bgcount,MPI_INT,outranks,inranks);CHKERRQ(ierr);
    ierr = PetscSFGatherEnd(bgcount,MPI_INT,outranks,inranks);CHKERRQ(ierr);
    ierr = MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);CHKERRQ(ierr);
    ierr = MPI_Group_incl(group,indegree[0],inranks,&sf->ingroup);CHKERRQ(ierr);
    ierr = MPI_Group_free(&group);CHKERRQ(ierr);
    ierr = PetscFree2(inranks,outranks);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&bgcount);CHKERRQ(ierr);
  }
  *incoming = sf->ingroup;

  if (sf->outgroup == MPI_GROUP_NULL) {
    ierr = MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);CHKERRQ(ierr);
    ierr = MPI_Group_incl(group,sf->nranks,sf->ranks,&sf->outgroup);CHKERRQ(ierr);
    ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  }
  *outgoing = sf->outgroup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetMultiSF"
/*@C
   PetscSFGetMultiSF - gets the inner SF implementing gathers and scatters

   Collective

   Input Argument:
.  sf - star forest that may contain roots with 0 or with more than 1 vertex

   Output Arguments:
.  multi - star forest with split roots, such that each root has degree exactly 1

   Level: developer

   Notes:
   In most cases, users should use PetscSFGatherBegin() and PetscSFScatterBegin() instead of manipulating multi
   directly. Since multi satisfies the stronger condition that each entry in the global space has exactly one incoming
   edge, it is a candidate for future optimization that might involve its removal.

.seealso: PetscSFSetGraph(), PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFGetMultiSF(PetscSF sf,PetscSF *multi)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(multi,2);
  if (sf->nroots < 0) {         /* Graph has not been set yet: return an empty multi-SF with the same configuration */
    ierr   = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);CHKERRQ(ierr);
    *multi = sf->multi;
    PetscFunctionReturn(0);
  }
  if (!sf->multi) {
    const PetscInt *indegree;
    PetscInt       i,*inoffset,*outones,*outoffset;
    PetscSFNode    *remote;
    ierr = PetscSFComputeDegreeBegin(sf,&indegree);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeEnd(sf,&indegree);CHKERRQ(ierr);
    ierr = PetscMalloc3(sf->nroots+1,&inoffset,sf->nleaves,&outones,sf->nleaves,&outoffset);CHKERRQ(ierr);
    inoffset[0] = 0;
#if 1
    for (i=0; i<sf->nroots; i++) inoffset[i+1] = PetscMax(i+1, inoffset[i] + indegree[i]);
#else
    for (i=0; i<sf->nroots; i++) inoffset[i+1] = inoffset[i] + indegree[i];
#endif
    for (i=0; i<sf->nleaves; i++) outones[i] = 1;
    ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,inoffset,outones,outoffset,MPIU_SUM);CHKERRQ(ierr);
    ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,inoffset,outones,outoffset,MPIU_SUM);CHKERRQ(ierr);
    for (i=0; i<sf->nroots; i++) inoffset[i] -= indegree[i]; /* Undo the increment */
#if 0
#if defined(PETSC_USE_DEBUG)                                 /* Check that the expected number of increments occurred */
    for (i=0; i<sf->nroots; i++) {
      if (inoffset[i] + indegree[i] != inoffset[i+1]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect result after PetscSFFetchAndOp");
    }
#endif
#endif
    ierr = PetscMalloc1(sf->nleaves,&remote);CHKERRQ(ierr);
    for (i=0; i<sf->nleaves; i++) {
      remote[i].rank  = sf->remote[i].rank;
      remote[i].index = outoffset[i];
    }
    ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
    if (sf->rankorder) {        /* Sort the ranks */
      PetscMPIInt rank;
      PetscInt    *inranks,*newoffset,*outranks,*newoutoffset,*tmpoffset,maxdegree;
      PetscSFNode *newremote;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
      for (i=0,maxdegree=0; i<sf->nroots; i++) maxdegree = PetscMax(maxdegree,indegree[i]);
      ierr = PetscMalloc5(sf->multi->nroots,&inranks,sf->multi->nroots,&newoffset,sf->nleaves,&outranks,sf->nleaves,&newoutoffset,maxdegree,&tmpoffset);CHKERRQ(ierr);
      for (i=0; i<sf->nleaves; i++) outranks[i] = rank;
      ierr = PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
      ierr = PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
      /* Sort the incoming ranks at each vertex, build the inverse map */
      for (i=0; i<sf->nroots; i++) {
        PetscInt j;
        for (j=0; j<indegree[i]; j++) tmpoffset[j] = j;
        ierr = PetscSortIntWithArray(indegree[i],inranks+inoffset[i],tmpoffset);CHKERRQ(ierr);
        for (j=0; j<indegree[i]; j++) newoffset[inoffset[i] + tmpoffset[j]] = inoffset[i] + j;
      }
      ierr = PetscSFBcastBegin(sf->multi,MPIU_INT,newoffset,newoutoffset);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(sf->multi,MPIU_INT,newoffset,newoutoffset);CHKERRQ(ierr);
      ierr = PetscMalloc1(sf->nleaves,&newremote);CHKERRQ(ierr);
      for (i=0; i<sf->nleaves; i++) {
        newremote[i].rank  = sf->remote[i].rank;
        newremote[i].index = newoutoffset[i];
      }
      ierr = PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,NULL,PETSC_COPY_VALUES,newremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
      ierr = PetscFree5(inranks,newoffset,outranks,newoutoffset,tmpoffset);CHKERRQ(ierr);
    }
    ierr = PetscFree3(inoffset,outones,outoffset);CHKERRQ(ierr);
  }
  *multi = sf->multi;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFCreateEmbeddedSF"
/*@C
   PetscSFCreateEmbeddedSF - removes edges from all but the selected roots, does not remap indices

   Collective

   Input Arguments:
+  sf - original star forest
.  nroots - number of roots to select on this process
-  selected - selected roots on this process

   Output Arguments:
.  newsf - new star forest

   Level: advanced

   Note:
   To use the new PetscSF, it may be necessary to know the indices of the leaves that are still participating. This can
   be done by calling PetscSFGetGraph().

.seealso: PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF sf,PetscInt nroots,const PetscInt *selected,PetscSF *newsf)
{
  PetscInt       *rootdata, *leafdata, *ilocal;
  PetscSFNode    *iremote;
  PetscInt       leafsize = 0, nleaves = 0, n, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (nroots) PetscValidPointer(selected,3);
  PetscValidPointer(newsf,4);
  if (sf->mine) for (i = 0; i < sf->nleaves; ++i) {leafsize = PetscMax(leafsize, sf->mine[i]+1);}
  else leafsize = sf->nleaves;
  ierr = PetscCalloc2(sf->nroots,&rootdata,leafsize,&leafdata);CHKERRQ(ierr);
  for (i=0; i<nroots; ++i) rootdata[selected[i]] = 1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);

  for (i = 0; i < leafsize; ++i) nleaves += leafdata[i];
  ierr = PetscMalloc1(nleaves,&ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&iremote);CHKERRQ(ierr);
  for (i = 0, n = 0; i < sf->nleaves; ++i) {
    const PetscInt lidx = sf->mine ? sf->mine[i] : i;

    if (leafdata[lidx]) {
      ilocal[n]        = lidx;
      iremote[n].rank  = sf->remote[i].rank;
      iremote[n].index = sf->remote[i].index;
      ++n;
    }
  }
  if (n != nleaves) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "There is a size mismatch in the SF embedding, %d != %d", n, nleaves);
  ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,newsf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*newsf,sf->nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
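
/*
   Usage sketch (illustrative only; the root numbers are hypothetical):
   restrict communication to the edges ending at roots 0 and 1 on this
   process, then inspect the surviving leaves with PetscSFGetGraph().

     const PetscInt selected[] = {0,1};
     PetscSF        esf;
     ierr = PetscSFCreateEmbeddedSF(sf,2,selected,&esf);CHKERRQ(ierr);
*/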

#undef __FUNCT__
#define __FUNCT__ "PetscSFBcastBegin"
/*@C
   PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()

   Collective on PetscSF

   Input Arguments:
+  sf - star forest on which to communicate
.  unit - data type associated with each node
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin()
@*/
PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_BcastBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->BcastBegin)(sf,unit,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_BcastBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFBcastEnd"
/*@C
   PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFReduceEnd()
@*/
PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_BcastEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->BcastEnd)(sf,unit,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_BcastEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
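
/*
   Usage sketch (illustrative only; rootdata and leafdata are assumed
   PetscInt buffers sized by the local root and leaf spaces): replicate the
   value at each root into every leaf that references it.  Independent local
   work may be overlapped between the Begin and End calls.

     ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
*/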

#undef __FUNCT__
#define __FUNCT__ "PetscSFReduceBegin"
/*@C
   PetscSFReduceBegin - begin reduction of leafdata into rootdata, to be completed with call to PetscSFReduceEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - values to reduce
-  op - reduction operation

   Output Arguments:
.  rootdata - result of reduction of values from all leaves of each root

   Level: intermediate

.seealso: PetscSFBcastBegin()
@*/
PetscErrorCode PetscSFReduceBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_ReduceBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->ReduceBegin)(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_ReduceBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFReduceEnd"
/*@C
   PetscSFReduceEnd - end a reduction operation started with PetscSFReduceBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - values to reduce
-  op - reduction operation

   Output Arguments:
.  rootdata - result of reduction of values from all leaves of each root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFBcastEnd()
@*/
PetscErrorCode PetscSFReduceEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_ReduceEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->ReduceEnd)(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_ReduceEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
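
/*
   Usage sketch (illustrative only; buffers assumed as above): sum the leaf
   contributions into the owning roots, e.g. accumulating shared degrees of
   freedom in a finite-element assembly.

     ierr = PetscSFReduceBegin(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);CHKERRQ(ierr);
     ierr = PetscSFReduceEnd(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);CHKERRQ(ierr);
*/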

#undef __FUNCT__
#define __FUNCT__ "PetscSFComputeDegreeBegin"
/*@C
   PetscSFComputeDegreeBegin - begin computation of degree for each root vertex, to be completed with PetscSFComputeDegreeEnd()

   Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
.  degree - degree of each root vertex

   Level: advanced

.seealso: PetscSFGatherBegin()
@*/
PetscErrorCode PetscSFComputeDegreeBegin(PetscSF sf,const PetscInt **degree)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  PetscValidPointer(degree,2);
  if (!sf->degree) {
    PetscInt i;
    ierr = PetscMalloc1(sf->nroots,&sf->degree);CHKERRQ(ierr);
    ierr = PetscMalloc1(sf->nleaves,&sf->degreetmp);CHKERRQ(ierr);
    for (i=0; i<sf->nroots; i++) sf->degree[i] = 0;
    for (i=0; i<sf->nleaves; i++) sf->degreetmp[i] = 1;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,sf->degreetmp,sf->degree,MPIU_SUM);CHKERRQ(ierr);
  }
  *degree = NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFComputeDegreeEnd"
/*@C
   PetscSFComputeDegreeEnd - complete computation of degree for each root vertex, started with PetscSFComputeDegreeBegin()

   Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
.  degree - degree of each root vertex

   Level: advanced

.seealso: PetscSFComputeDegreeBegin()
@*/
PetscErrorCode PetscSFComputeDegreeEnd(PetscSF sf,const PetscInt **degree)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  if (!sf->degreeknown) {
    ierr = PetscSFReduceEnd(sf,MPIU_INT,sf->degreetmp,sf->degree,MPIU_SUM);CHKERRQ(ierr);
    ierr = PetscFree(sf->degreetmp);CHKERRQ(ierr);

    sf->degreeknown = PETSC_TRUE;
  }
  *degree = sf->degree;
  PetscFunctionReturn(0);
}
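
/*
   Usage sketch (illustrative only): count how many leaves reference each
   local root.  The returned array is owned by the SF and must not be freed
   by the caller.

     const PetscInt *degree;
     ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
     ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
*/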

#undef __FUNCT__
#define __FUNCT__ "PetscSFFetchAndOpBegin"
/*@C
   PetscSFFetchAndOpBegin - begin operation that fetches values from root and updates atomically by applying operation using my leaf value, to be completed with PetscSFFetchAndOpEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - leaf values to use in reduction
-  op - operation to use for reduction

   Output Arguments:
+  rootdata - root values to be updated, input state is seen by first process to perform an update
-  leafupdate - state at each leaf's respective root immediately prior to my atomic update

   Level: advanced

   Note:
   The update is only atomic at the granularity provided by the hardware. Different roots referenced by the same process
   might be updated in a different order. Furthermore, if a composite type is used for the unit datatype, atomicity is
   not guaranteed across the whole vertex. Therefore, this function is mostly only used with primitive types such as
   integers.

.seealso: PetscSFComputeDegreeBegin(), PetscSFReduceBegin(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFFetchAndOpBegin(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_FetchAndOpBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->FetchAndOpBegin)(sf,unit,rootdata,leafdata,leafupdate,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_FetchAndOpBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFFetchAndOpEnd"
/*@C
   PetscSFFetchAndOpEnd - end operation started in matching call to PetscSFFetchAndOpBegin() to fetch values from roots and update atomically by applying operation using my leaf value

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - leaf values to use in reduction
-  op - operation to use for reduction

   Output Arguments:
+  rootdata - root values to be updated, input state is seen by first process to perform an update
-  leafupdate - state at each leaf's respective root immediately prior to my atomic update

   Level: advanced

.seealso: PetscSFComputeDegreeEnd(), PetscSFReduceEnd(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFFetchAndOpEnd(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_FetchAndOpEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->FetchAndOpEnd)(sf,unit,rootdata,leafdata,leafupdate,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_FetchAndOpEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
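
/*
   Usage sketch (illustrative only; NROOTS and NLEAVES are hypothetical
   sizes, rootcount is assumed zeroed and ones filled with 1): each leaf
   atomically adds 1 to a counter at its root and receives in slot the
   counter value seen just before its own update, i.e. the slot-claiming
   pattern used by PetscSFGetMultiSF().

     PetscInt rootcount[NROOTS],ones[NLEAVES],slot[NLEAVES];
     ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,rootcount,ones,slot,MPIU_SUM);CHKERRQ(ierr);
     ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,rootcount,ones,slot,MPIU_SUM);CHKERRQ(ierr);
*/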

#undef __FUNCT__
#define __FUNCT__ "PetscSFGatherBegin"
/*@C
   PetscSFGatherBegin - begin pointwise gather of all leaves into multi-roots, to be completed with PetscSFGatherEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  leafdata - leaf data to gather to roots

   Output Argument:
.  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

   Level: intermediate

.seealso: PetscSFComputeDegreeBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFGatherBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGatherEnd"
/*@C
   PetscSFGatherEnd - ends pointwise gather operation that was started with PetscSFGatherBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  leafdata - leaf data to gather to roots

   Output Argument:
.  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

   Level: intermediate

.seealso: PetscSFComputeDegreeEnd(), PetscSFScatterEnd()
@*/
PetscErrorCode PetscSFGatherEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
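
/*
   Usage sketch (illustrative only; nroots and leafdata are assumed from the
   surrounding code): gather one entry per leaf at each root.  The multi-root
   buffer holds degree[i] entries for root i, so its length is the sum of the
   root degrees.

     const PetscInt *degree;
     PetscInt       i,nmulti = 0;
     PetscScalar    *multirootdata;
     ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
     ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
     for (i=0; i<nroots; i++) nmulti += degree[i];
     ierr = PetscMalloc1(nmulti,&multirootdata);CHKERRQ(ierr);
     ierr = PetscSFGatherBegin(sf,MPIU_SCALAR,leafdata,multirootdata);CHKERRQ(ierr);
     ierr = PetscSFGatherEnd(sf,MPIU_SCALAR,leafdata,multirootdata);CHKERRQ(ierr);
*/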

#undef __FUNCT__
#define __FUNCT__ "PetscSFScatterBegin"
/*@C
   PetscSFScatterBegin - begin pointwise scatter operation from multi-roots to leaves, to be completed with PetscSFScatterEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  multirootdata - root buffer to send to each leaf, one unit of data per leaf

   Output Argument:
.  leafdata - leaf data to be updated with personal data from each respective root

   Level: intermediate

.seealso: PetscSFComputeDegreeBegin(), PetscSFScatterEnd()
@*/
PetscErrorCode PetscSFScatterBegin(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(multi,unit,multirootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFScatterEnd"
/*@C
   PetscSFScatterEnd - ends pointwise scatter operation that was started with PetscSFScatterBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  multirootdata - root buffer to send to each leaf, one unit of data per leaf

   Output Argument:
.  leafdata - leaf data to be updated with personal data from each respective root

   Level: intermediate

.seealso: PetscSFComputeDegreeEnd(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFScatterEnd(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(multi,unit,multirootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
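
/*
   Usage sketch (illustrative only): the inverse of the gather above; send a
   distinct value from each multi-root slot back to its leaf.  The layout of
   multirootdata matches the one produced by PetscSFGatherBegin() and
   PetscSFGatherEnd().

     ierr = PetscSFScatterBegin(sf,MPIU_SCALAR,multirootdata,leafdata);CHKERRQ(ierr);
     ierr = PetscSFScatterEnd(sf,MPIU_SCALAR,multirootdata,leafdata);CHKERRQ(ierr);
*/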