#include <petsc-private/sfimpl.h> /*I "petscsf.h" I*/
#include <petscctable.h>

/* Logging support */
PetscLogEvent PETSCSF_SetGraph, PETSCSF_BcastBegin, PETSCSF_BcastEnd, PETSCSF_ReduceBegin, PETSCSF_ReduceEnd, PETSCSF_FetchAndOpBegin, PETSCSF_FetchAndOpEnd;

#if defined(PETSC_USE_DEBUG)
#  define PetscSFCheckGraphSet(sf,arg) do {                            \
    if (PetscUnlikely(!(sf)->graphset))                                 \
      SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetGraph() on argument %D \"%s\" before %s()",(arg),#sf,PETSC_FUNCTION_NAME); \
  } while (0)
#else
#  define PetscSFCheckGraphSet(sf,arg) do {} while (0)
#endif

const char *const PetscSFDuplicateOptions[] = {"CONFONLY","RANKS","GRAPH","PetscSFDuplicateOption","PETSCSF_DUPLICATE_",0};

#undef __FUNCT__
#define __FUNCT__ "PetscSFCreate"
/*@C
   PetscSFCreate - create a star forest communication context

   Not Collective

   Input Arguments:
.  comm - communicator on which the star forest will operate

   Output Arguments:
.  sf - new star forest context

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFDestroy()
@*/
PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF *sf)
{
  PetscErrorCode ierr;
  PetscSF        b;

  PetscFunctionBegin;
  PetscValidPointer(sf,2);
  ierr = PetscSFInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(b,_p_PetscSF,struct _PetscSFOps,PETSCSF_CLASSID,"PetscSF","Star Forest","PetscSF",comm,PetscSFDestroy,PetscSFView);CHKERRQ(ierr);

  b->nroots    = -1;
  b->nleaves   = -1;
  b->nranks    = -1;
  b->rankorder = PETSC_TRUE;
  b->ingroup   = MPI_GROUP_NULL;
  b->outgroup  = MPI_GROUP_NULL;
  b->graphset  = PETSC_FALSE;

  *sf = b;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFReset"
/*@C
   PetscSFReset - Reset a star forest so that different sizes or neighbors can be used

   Collective

   Input Arguments:
.  sf - star forest

   Level: advanced

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFDestroy()
@*/
PetscErrorCode PetscSFReset(PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  sf->mine = NULL;
  ierr = PetscFree(sf->mine_alloc);CHKERRQ(ierr);
  sf->remote = NULL;
  ierr = PetscFree(sf->remote_alloc);CHKERRQ(ierr);
  ierr = PetscFree4(sf->ranks,sf->roffset,sf->rmine,sf->rremote);CHKERRQ(ierr);
  ierr = PetscFree(sf->degree);CHKERRQ(ierr);
  if (sf->ingroup  != MPI_GROUP_NULL) {ierr = MPI_Group_free(&sf->ingroup);CHKERRQ(ierr);}
  if (sf->outgroup != MPI_GROUP_NULL) {ierr = MPI_Group_free(&sf->outgroup);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sf->multi);CHKERRQ(ierr);
  sf->graphset = PETSC_FALSE;
  if (sf->ops->Reset) {ierr = (*sf->ops->Reset)(sf);CHKERRQ(ierr);}
  sf->setupcalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetType"
/*@C
   PetscSFSetType - set the PetscSF communication implementation

   Collective on PetscSF

   Input Parameters:
+  sf - the PetscSF context
-  type - a known method

   Options Database Key:
.  -sf_type <type> - Sets the method; use -help for a list of available methods (for instance, window, basic)

   Notes:
   See "include/petscsf.h" for available methods (for instance)
+    PETSCSFWINDOW - MPI-2/3 one-sided
-    PETSCSFBASIC - basic implementation using MPI-1 two-sided

   Level: intermediate
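   Example Usage:
   A minimal sketch of selecting the one-sided implementation explicitly (sf is assumed to have been
   created with PetscSFCreate(); error handling abbreviated):
.vb
   ierr = PetscSFSetType(sf,PETSCSFWINDOW);CHKERRQ(ierr);   /* or PETSCSFBASIC */
   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
.ve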
.keywords: PetscSF, set, type

.seealso: PetscSFType, PetscSFCreate()
@*/
PetscErrorCode PetscSFSetType(PetscSF sf,PetscSFType type)
{
  PetscErrorCode ierr,(*r)(PetscSF);
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidCharPointer(type,2);

  ierr = PetscObjectTypeCompare((PetscObject)sf,type,&match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  ierr = PetscFunctionListFind(PetscSFList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested PetscSF type %s",type);
  /* Destroy the previous private PetscSF context */
  if (sf->ops->Destroy) {
    ierr = (*(sf)->ops->Destroy)(sf);CHKERRQ(ierr);
  }
  ierr = PetscMemzero(sf->ops,sizeof(*sf->ops));CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)sf,type);CHKERRQ(ierr);
  ierr = (*r)(sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFDestroy"
/*@C
   PetscSFDestroy - destroy star forest

   Collective

   Input Arguments:
.  sf - address of star forest

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFReset()
@*/
PetscErrorCode PetscSFDestroy(PetscSF *sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*sf) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*sf),PETSCSF_CLASSID,1);
  if (--((PetscObject)(*sf))->refct > 0) {*sf = 0; PetscFunctionReturn(0);}
  ierr = PetscSFReset(*sf);CHKERRQ(ierr);
  if ((*sf)->ops->Destroy) {ierr = (*(*sf)->ops->Destroy)(*sf);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetUp"
/*@
   PetscSFSetUp - set up communication structures

   Collective

   Input Arguments:
.  sf - star forest communication object

   Level: beginner

.seealso: PetscSFSetFromOptions(), PetscSFSetType()
@*/
PetscErrorCode PetscSFSetUp(PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (sf->setupcalled) PetscFunctionReturn(0);
  if (!((PetscObject)sf)->type_name) {ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);}
  if (sf->ops->SetUp) {ierr = (*sf->ops->SetUp)(sf);CHKERRQ(ierr);}
  sf->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetFromOptions"
/*@C
   PetscSFSetFromOptions - set PetscSF options using the options database

   Logically Collective

   Input Arguments:
.  sf - star forest

   Options Database Keys:
+  -sf_type <basic,window> - implementation type, see PetscSFSetType()
.  -sf_rank_order <bool> - sort composite points for gathers and scatters in rank order
-  -sf_synchronization - synchronization type used by PetscSF

   Level: intermediate
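   Example Usage:
   The usual configure-then-setup sequence, letting the command line pick the implementation (sketch;
   sf is assumed to exist and the graph to be set elsewhere):
.vb
   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);   /* e.g. run with -sf_type window */
   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
.ve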
.keywords: PetscSF, set, from, options, database

.seealso: PetscSFWindowSetSyncType()
@*/
PetscErrorCode PetscSFSetFromOptions(PetscSF sf)
{
  PetscSFType    deft;
  char           type[256];
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)sf);CHKERRQ(ierr);
  deft = ((PetscObject)sf)->type_name ? ((PetscObject)sf)->type_name : PETSCSFBASIC;
  ierr = PetscOptionsFList("-sf_type","PetscSF implementation type","PetscSFSetType",PetscSFList,deft,type,256,&flg);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,flg ? type : deft);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-sf_rank_order","sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise","PetscSFSetRankOrder",sf->rankorder,&sf->rankorder,NULL);CHKERRQ(ierr);
  if (sf->ops->SetFromOptions) {ierr = (*sf->ops->SetFromOptions)(sf);CHKERRQ(ierr);}
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetRankOrder"
/*@C
   PetscSFSetRankOrder - sort multi-points for gathers and scatters by rank order

   Logically Collective

   Input Arguments:
+  sf - star forest
-  flg - PETSC_TRUE to sort, PETSC_FALSE to skip sorting (lower setup cost, but non-deterministic)

   Level: advanced

.seealso: PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFSetRankOrder(PetscSF sf,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidLogicalCollectiveBool(sf,flg,2);
  if (sf->multi) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Rank ordering must be set before first call to PetscSFGatherBegin() or PetscSFScatterBegin()");
  sf->rankorder = flg;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFSetGraph"
/*@C
   PetscSFSetGraph - Set a parallel star forest

   Collective

   Input Arguments:
+  sf - star forest
.  nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
.  iremote - remote locations of root vertices for each leaf on the current process
-  remotemode - copy mode for iremote

   Level: intermediate
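   Example Usage:
   A small, purely illustrative layout: every process owns one root and contributes one leaf that
   references root 0 on the next process in a ring (comm, sf, and ierr are assumed to exist):
.vb
   PetscSFNode remote;
   PetscMPIInt rank,size;
   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
   remote.rank  = (rank+1)%size;
   remote.index = 0;
   ierr = PetscSFSetGraph(sf,1,1,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
.ve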
.seealso: PetscSFCreate(), PetscSFView(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraph(PetscSF sf,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscSFNode *iremote,PetscCopyMode remotemode)
{
  PetscErrorCode     ierr;
  PetscTable         table;
  PetscTablePosition pos;
  PetscMPIInt        size;
  PetscInt           i,*rcount,*ranks;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscLogEventBegin(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);
  if (nleaves && ilocal) PetscValidIntPointer(ilocal,4);
  if (nleaves) PetscValidPointer(iremote,6);
  if (nroots < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nroots %D, cannot be negative",nroots);
  if (nleaves < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nleaves %D, cannot be negative",nleaves);
  ierr = PetscSFReset(sf);CHKERRQ(ierr);
  sf->nroots  = nroots;
  sf->nleaves = nleaves;
  if (ilocal) {
    switch (localmode) {
    case PETSC_COPY_VALUES:
      ierr     = PetscMalloc1(nleaves,&sf->mine_alloc);CHKERRQ(ierr);
      sf->mine = sf->mine_alloc;
      ierr     = PetscMemcpy(sf->mine,ilocal,nleaves*sizeof(*sf->mine));CHKERRQ(ierr);
      break;
    case PETSC_OWN_POINTER:
      sf->mine_alloc = (PetscInt*)ilocal;
      sf->mine       = sf->mine_alloc;
      break;
    case PETSC_USE_POINTER:
      sf->mine = (PetscInt*)ilocal;
      break;
    default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown localmode");
    }
    /* Leaf locations are explicit, so compute the active leaf range from them, regardless of copy mode */
    sf->minleaf = PETSC_MAX_INT;
    sf->maxleaf = PETSC_MIN_INT;
    for (i=0; i<nleaves; i++) {
      sf->minleaf = PetscMin(sf->minleaf,sf->mine[i]);
      sf->maxleaf = PetscMax(sf->maxleaf,sf->mine[i]);
    }
  }
  if (!ilocal || !nleaves) {
    sf->minleaf = 0;
    sf->maxleaf = nleaves - 1;
  }
  switch (remotemode) {
  case PETSC_COPY_VALUES:
    ierr       = PetscMalloc1(nleaves,&sf->remote_alloc);CHKERRQ(ierr);
    sf->remote = sf->remote_alloc;
    ierr       = PetscMemcpy(sf->remote,iremote,nleaves*sizeof(*sf->remote));CHKERRQ(ierr);
    break;
  case PETSC_OWN_POINTER:
    sf->remote_alloc = (PetscSFNode*)iremote;
    sf->remote       = sf->remote_alloc;
    break;
  case PETSC_USE_POINTER:
    sf->remote = (PetscSFNode*)iremote;
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown remotemode");
  }

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);CHKERRQ(ierr);
  ierr = PetscTableCreate(10,size,&table);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    /* Log 1-based rank */
    ierr = PetscTableAdd(table,iremote[i].rank+1,1,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(table,&sf->nranks);CHKERRQ(ierr);
  ierr = PetscMalloc4(sf->nranks,&sf->ranks,sf->nranks+1,&sf->roffset,nleaves,&sf->rmine,nleaves,&sf->rremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(sf->nranks,&rcount,sf->nranks,&ranks);CHKERRQ(ierr);
  ierr = PetscTableGetHeadPosition(table,&pos);CHKERRQ(ierr);
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscTableGetNext(table,&pos,&ranks[i],&rcount[i]);CHKERRQ(ierr);
    ranks[i]--; /* Convert back to 0-based */
  }
  ierr = PetscTableDestroy(&table);CHKERRQ(ierr);
  ierr = PetscSortIntWithArray(sf->nranks,ranks,rcount);CHKERRQ(ierr);
  sf->roffset[0] = 0;
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscMPIIntCast(ranks[i],sf->ranks+i);CHKERRQ(ierr);
    sf->roffset[i+1] = sf->roffset[i] + rcount[i];
    rcount[i]        = 0;
  }
  for (i=0; i<nleaves; i++) {
    PetscInt lo,hi,irank;
    /* Search for index of iremote[i].rank in sf->ranks */
    lo = 0; hi = sf->nranks;
    while (hi - lo > 1) {
      PetscInt mid = lo + (hi - lo)/2;
      if (iremote[i].rank < sf->ranks[mid]) hi = mid;
      else                                  lo = mid;
    }
    if (hi - lo == 1 && iremote[i].rank == sf->ranks[lo]) irank = lo;
    else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Could not find rank %D in array",iremote[i].rank);
    sf->rmine[sf->roffset[irank] + rcount[irank]]   = ilocal ? ilocal[i] : i;
    sf->rremote[sf->roffset[irank] + rcount[irank]] = iremote[i].index;
    rcount[irank]++;
  }
  ierr = PetscFree2(rcount,ranks);CHKERRQ(ierr);
#if !defined(PETSC_USE_64BIT_INDICES)
  if (nroots == PETSC_DETERMINE) {
    /* Jed, if you have a better way to do this, put it in */
    PetscInt *numRankLeaves, *leafOff, *leafIndices, *numRankRoots, *rootOff, *rootIndices, maxRoots = 0;

    /* All to all to determine number of leaf indices from each (you can do this using Scan and asynch messages) */
    ierr = PetscMalloc4(size,&numRankLeaves,size+1,&leafOff,size,&numRankRoots,size+1,&rootOff);CHKERRQ(ierr);
    ierr = PetscMemzero(numRankLeaves, size * sizeof(PetscInt));CHKERRQ(ierr);
    for (i = 0; i < nleaves; ++i) ++numRankLeaves[iremote[i].rank];
    ierr = MPI_Alltoall(numRankLeaves, 1, MPIU_INT, numRankRoots, 1, MPIU_INT, PetscObjectComm((PetscObject)sf));CHKERRQ(ierr);
    /* Could set nroots to this maximum */
    for (i = 0; i < size; ++i) maxRoots += numRankRoots[i];

    /* Gather all indices */
    ierr = PetscMalloc2(nleaves,&leafIndices,maxRoots,&rootIndices);CHKERRQ(ierr);
    leafOff[0] = 0;
    for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
    for (i = 0; i < nleaves; ++i) leafIndices[leafOff[iremote[i].rank]++] = iremote[i].index;
    leafOff[0] = 0;
    for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
    rootOff[0] = 0;
    for (i = 0; i < size; ++i) rootOff[i+1] = rootOff[i] + numRankRoots[i];
    ierr = MPI_Alltoallv(leafIndices, numRankLeaves, leafOff, MPIU_INT, rootIndices, numRankRoots, rootOff, MPIU_INT, PetscObjectComm((PetscObject)sf));CHKERRQ(ierr);
    /* Sort and reduce */
    ierr = PetscSortRemoveDupsInt(&maxRoots, rootIndices);CHKERRQ(ierr);
    ierr = PetscFree2(leafIndices,rootIndices);CHKERRQ(ierr);
    ierr = PetscFree4(numRankLeaves,leafOff,numRankRoots,rootOff);CHKERRQ(ierr);
    sf->nroots = maxRoots;
  }
#endif

  sf->graphset = PETSC_TRUE;
  ierr = PetscLogEventEnd(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFCreateInverseSF"
/*@C
   PetscSFCreateInverseSF - given a PetscSF in which all vertices have degree 1, creates the inverse map

   Collective

   Input Arguments:
.  sf - star forest to invert

   Output Arguments:
.  isf - inverse of sf

   Level: advanced

   Notes:
   All roots must have degree 1.

   The local space may be a permutation, but cannot be sparse.
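   Example Usage:
   A sketch of inverting an SF so that data can be moved in the opposite direction (the original roots
   become leaves of the inverse and vice versa; isf is destroyed by the caller):
.vb
   PetscSF isf;
   ierr = PetscSFCreateInverseSF(sf,&isf);CHKERRQ(ierr);
   /* ... use isf with PetscSFBcastBegin()/PetscSFReduceBegin() as needed ... */
   ierr = PetscSFDestroy(&isf);CHKERRQ(ierr);
.ve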
.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFCreateInverseSF(PetscSF sf,PetscSF *isf)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,nroots,nleaves,maxlocal,count,*newilocal;
  const PetscInt *ilocal;
  PetscSFNode    *roots,*leaves;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,(ilocal ? ilocal[i] : i)+1);
  ierr = PetscMalloc2(nroots,&roots,nleaves,&leaves);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    leaves[i].rank  = rank;
    leaves[i].index = i;
  }
  for (i=0; i<nroots; i++) {
    roots[i].rank  = -1;
    roots[i].index = -1;
  }
  ierr = PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);

  /* Check whether our leaves are sparse */
  for (i=0,count=0; i<nroots; i++) if (roots[i].rank >= 0) count++;
  if (count == nroots) newilocal = NULL;
  else {                        /* Index for sparse leaves and compact "roots" array (which is to become our leaves). */
    ierr = PetscMalloc1(count,&newilocal);CHKERRQ(ierr);
    for (i=0,count=0; i<nroots; i++) {
      if (roots[i].rank >= 0) {
        newilocal[count]   = i;
        roots[count].rank  = roots[i].rank;
        roots[count].index = roots[i].index;
        count++;
      }
    }
  }

  ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,isf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*isf,maxlocal,count,newilocal,PETSC_OWN_POINTER,roots,PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(roots,leaves);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFDuplicate"
/*@
   PetscSFDuplicate - duplicate a PetscSF, optionally preserving rank connectivity and graph

   Collective

   Input Arguments:
+  sf - communication object to duplicate
-  opt - PETSCSF_DUPLICATE_CONFONLY, PETSCSF_DUPLICATE_RANKS, or PETSCSF_DUPLICATE_GRAPH (see PetscSFDuplicateOption)

   Output Arguments:
.  newsf - new communication object

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetType(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFDuplicate(PetscSF sf,PetscSFDuplicateOption opt,PetscSF *newsf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)sf),newsf);CHKERRQ(ierr);
  ierr = PetscSFSetType(*newsf,((PetscObject)sf)->type_name);CHKERRQ(ierr);
  if (sf->ops->Duplicate) {ierr = (*sf->ops->Duplicate)(sf,opt,*newsf);CHKERRQ(ierr);}
  if (opt == PETSCSF_DUPLICATE_GRAPH) {
    PetscInt          nroots,nleaves;
    const PetscInt    *ilocal;
    const PetscSFNode *iremote;
    ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetGraph"
/*@C
   PetscSFGetGraph - Get the graph specifying a parallel star forest

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers
-  iremote - remote locations of root vertices for each leaf on the current process

   Level: intermediate
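   Example Usage:
   Iterating over the local leaves of an SF (sketch; note that ilocal may be NULL for contiguous storage):
.vb
   PetscInt          nroots,nleaves,i;
   const PetscInt    *ilocal;
   const PetscSFNode *iremote;
   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);CHKERRQ(ierr);
   for (i=0; i<nleaves; i++) {
     const PetscInt leafloc = ilocal ? ilocal[i] : i;   /* location of leaf i in leafdata buffers */
     /* iremote[i].rank and iremote[i].index identify the root referenced by leafloc */
   }
.ve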
.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetGraph(PetscSF sf,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  /* We are not currently requiring that the graph is set, thus returning nroots=-1 if it has not been set */
  /* if (!sf->graphset) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Graph has not been set, must call PetscSFSetGraph()"); */
  if (nroots)  *nroots  = sf->nroots;
  if (nleaves) *nleaves = sf->nleaves;
  if (ilocal)  *ilocal  = sf->mine;
  if (iremote) *iremote = sf->remote;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetLeafRange"
/*@C
   PetscSFGetLeafRange - Get the active leaf ranges

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  minleaf - minimum active leaf on this process
-  maxleaf - maximum active leaf on this process

   Level: developer

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFGetLeafRange(PetscSF sf,PetscInt *minleaf,PetscInt *maxleaf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (minleaf) *minleaf = sf->minleaf;
  if (maxleaf) *maxleaf = sf->maxleaf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFView"
/*@C
   PetscSFView - view a star forest

   Collective

   Input Arguments:
+  sf - star forest
-  viewer - viewer to display graph, for example PETSC_VIEWER_STDOUT_WORLD

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFView(PetscSF sf,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (!viewer) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)sf),&viewer);CHKERRQ(ierr);}
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(sf,1,viewer,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    PetscMPIInt rank;
    PetscInt    i,j;

    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)sf,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    if (sf->ops->View) {ierr = (*sf->ops->View)(sf,viewer);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of roots=%D, leaves=%D, remote ranks=%D\n",rank,sf->nroots,sf->nleaves,sf->nranks);CHKERRQ(ierr);
    for (i=0; i<sf->nleaves; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- (%D,%D)\n",rank,sf->mine ? sf->mine[i] : i,sf->remote[i].rank,sf->remote[i].index);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Roots referenced by my leaves, by rank\n",rank);CHKERRQ(ierr);
      for (i=0; i<sf->nranks; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d: %D edges\n",rank,sf->ranks[i],sf->roffset[i+1]-sf->roffset[i]);CHKERRQ(ierr);
        for (j=sf->roffset[i]; j<sf->roffset[i+1]; j++) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- %D\n",rank,sf->rmine[j],sf->rremote[j]);CHKERRQ(ierr);
        }
      }
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetRanks"
/*@C
   PetscSFGetRanks - Get ranks and number of vertices referenced by leaves on this process

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nranks - number of ranks referenced by local part
.  ranks - array of ranks
.  roffset - offset in rmine/rremote for each rank (length nranks+1)
.  rmine - concatenated array holding local indices referencing each remote rank
-  rremote - concatenated array holding remote indices referenced for each remote rank

   Level: developer

.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (nranks)  *nranks  = sf->nranks;
  if (ranks)   *ranks   = sf->ranks;
  if (roffset) *roffset = sf->roffset;
  if (rmine)   *rmine   = sf->rmine;
  if (rremote) *rremote = sf->rremote;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetGroups"
/*@C
   PetscSFGetGroups - gets incoming and outgoing process groups

   Collective

   Input Argument:
.  sf - star forest

   Output Arguments:
+  incoming - group of origin processes for incoming edges (leaves that reference my roots)
-  outgoing - group of destination processes for outgoing edges (roots that I reference)

   Level: developer
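   Example Usage:
   The groups are cached in the SF and freed by PetscSFReset(), so the caller must not free them (sketch):
.vb
   MPI_Group incoming,outgoing;
   ierr = PetscSFGetGroups(sf,&incoming,&outgoing);CHKERRQ(ierr);
.ve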
.seealso: PetscSFGetWindow(), PetscSFRestoreWindow()
@*/
PetscErrorCode PetscSFGetGroups(PetscSF sf,MPI_Group *incoming,MPI_Group *outgoing)
{
  PetscErrorCode ierr;
  MPI_Group      group;

  PetscFunctionBegin;
  if (sf->ingroup == MPI_GROUP_NULL) {
    PetscInt       i;
    const PetscInt *indegree;
    PetscMPIInt    rank,*outranks,*inranks;
    PetscSFNode    *remote;
    PetscSF        bgcount;

    /* Compute the number of incoming ranks */
    ierr = PetscMalloc1(sf->nranks,&remote);CHKERRQ(ierr);
    for (i=0; i<sf->nranks; i++) {
      remote[i].rank  = sf->ranks[i];
      remote[i].index = 0;
    }
    ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,&bgcount);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(bgcount,1,sf->nranks,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeBegin(bgcount,&indegree);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeEnd(bgcount,&indegree);CHKERRQ(ierr);

    /* Enumerate the incoming ranks */
    ierr = PetscMalloc2(indegree[0],&inranks,sf->nranks,&outranks);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
    for (i=0; i<sf->nranks; i++) outranks[i] = rank;
    ierr = PetscSFGatherBegin(bgcount,MPI_INT,outranks,inranks);CHKERRQ(ierr);
    ierr = PetscSFGatherEnd(bgcount,MPI_INT,outranks,inranks);CHKERRQ(ierr);
    ierr = MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);CHKERRQ(ierr);
    ierr = MPI_Group_incl(group,indegree[0],inranks,&sf->ingroup);CHKERRQ(ierr);
    ierr = MPI_Group_free(&group);CHKERRQ(ierr);
    ierr = PetscFree2(inranks,outranks);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&bgcount);CHKERRQ(ierr);
  }
  *incoming = sf->ingroup;

  if (sf->outgroup == MPI_GROUP_NULL) {
    ierr = MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);CHKERRQ(ierr);
    ierr = MPI_Group_incl(group,sf->nranks,sf->ranks,&sf->outgroup);CHKERRQ(ierr);
    ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  }
  *outgoing = sf->outgroup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGetMultiSF"
/*@C
   PetscSFGetMultiSF - gets the inner SF implementing gathers and scatters

   Collective

   Input Argument:
.  sf - star forest that may contain roots with 0 or with more than 1 vertex

   Output Arguments:
.  multi - star forest with split roots, such that each root has degree exactly 1

   Level: developer

   Notes:
   In most cases, users should use PetscSFGatherBegin() and PetscSFScatterBegin() instead of manipulating multi
   directly. Since multi satisfies the stronger condition that each entry in the global space has exactly one incoming
   edge, it is a candidate for future optimization that might involve its removal.

.seealso: PetscSFSetGraph(), PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFGetMultiSF(PetscSF sf,PetscSF *multi)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(multi,2);
  if (sf->nroots < 0) {         /* Graph has not been set yet; why do we need this? */
    ierr   = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);CHKERRQ(ierr);
    *multi = sf->multi;
    PetscFunctionReturn(0);
  }
  if (!sf->multi) {
    const PetscInt *indegree;
    PetscInt       i,maxlocal,*inoffset,*outones,*outoffset;
    PetscSFNode    *remote;
    ierr = PetscSFComputeDegreeBegin(sf,&indegree);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeEnd(sf,&indegree);CHKERRQ(ierr);
    /* outones/outoffset are leaf buffers, so they must span the full leaf range rather than the root space */
    for (i=0,maxlocal=0; i<sf->nleaves; i++) maxlocal = PetscMax(maxlocal,(sf->mine ? sf->mine[i] : i)+1);
    ierr = PetscMalloc3(sf->nroots+1,&inoffset,maxlocal,&outones,maxlocal,&outoffset);CHKERRQ(ierr);
    inoffset[0] = 0;
    for (i=0; i<sf->nroots; i++) inoffset[i+1] = inoffset[i] + indegree[i];
    for (i=0; i<maxlocal; i++) outones[i] = 1;
    ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,inoffset,outones,outoffset,MPIU_SUM);CHKERRQ(ierr);
    ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,inoffset,outones,outoffset,MPIU_SUM);CHKERRQ(ierr);
    for (i=0; i<sf->nroots; i++) inoffset[i] -= indegree[i]; /* Undo the increment */
#if 0
#if defined(PETSC_USE_DEBUG)                                 /* Check that the expected number of increments occurred */
    for (i=0; i<sf->nroots; i++) {
      if (inoffset[i] + indegree[i] != inoffset[i+1]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect result after PetscSFFetchAndOp");
    }
#endif
#endif
    ierr = PetscMalloc1(sf->nleaves,&remote);CHKERRQ(ierr);
    for (i=0; i<sf->nleaves; i++) {
      remote[i].rank  = sf->remote[i].rank;
      remote[i].index = outoffset[sf->mine ? sf->mine[i] : i];
    }
    ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
    if (sf->rankorder) {        /* Sort the ranks */
      PetscMPIInt rank;
      PetscInt    *inranks,*newoffset,*outranks,*newoutoffset,*tmpoffset,maxdegree;
      PetscSFNode *newremote;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
      for (i=0,maxdegree=0; i<sf->nroots; i++) maxdegree = PetscMax(maxdegree,indegree[i]);
      /* outranks/newoutoffset are leaf data for sf->multi, whose leaves are contiguous 0..nleaves-1 */
      ierr = PetscMalloc5(sf->multi->nroots,&inranks,sf->multi->nroots,&newoffset,sf->nleaves,&outranks,sf->nleaves,&newoutoffset,maxdegree,&tmpoffset);CHKERRQ(ierr);
      for (i=0; i<sf->nleaves; i++) outranks[i] = rank;
      ierr = PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
      ierr = PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
      /* Sort the incoming ranks at each vertex, build the inverse map */
      for (i=0; i<sf->nroots; i++) {
        PetscInt j;
        for (j=0; j<indegree[i]; j++) tmpoffset[j] = j;
        ierr = PetscSortIntWithArray(indegree[i],inranks+inoffset[i],tmpoffset);CHKERRQ(ierr);
        for (j=0; j<indegree[i]; j++) newoffset[inoffset[i] + tmpoffset[j]] = inoffset[i] + j;
      }
      ierr = PetscSFBcastBegin(sf->multi,MPIU_INT,newoffset,newoutoffset);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(sf->multi,MPIU_INT,newoffset,newoutoffset);CHKERRQ(ierr);
      ierr = PetscMalloc1(sf->nleaves,&newremote);CHKERRQ(ierr);
      for (i=0; i<sf->nleaves; i++) {
        newremote[i].rank  = sf->remote[i].rank;
        newremote[i].index = newoutoffset[i];
      }
      ierr = PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,NULL,PETSC_COPY_VALUES,newremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
      ierr = PetscFree5(inranks,newoffset,outranks,newoutoffset,tmpoffset);CHKERRQ(ierr);
    }
    ierr = PetscFree3(inoffset,outones,outoffset);CHKERRQ(ierr);
  }
  *multi = sf->multi;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFCreateEmbeddedSF"
/*@C
   PetscSFCreateEmbeddedSF - removes edges from all but the selected roots, does not remap indices

   Collective

   Input Arguments:
+  sf - original star forest
.  nroots - number of roots to select on this process
-  selected - selected roots on this process

   Output Arguments:
.  newsf - new star forest

   Level: advanced

   Note:
   To use the new PetscSF, it may be necessary to know the indices of the leaves that are still participating. This can
   be done by calling PetscSFGetGraph().
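   Example Usage:
   Restricting communication to the edges rooted at two selected local roots (a sketch; the root
   indices are purely illustrative):
.vb
   const PetscInt selected[] = {0,3};
   PetscSF        esf;
   ierr = PetscSFCreateEmbeddedSF(sf,2,selected,&esf);CHKERRQ(ierr);
   /* ... communicate on esf ... */
   ierr = PetscSFDestroy(&esf);CHKERRQ(ierr);
.ve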
.seealso: PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF sf,PetscInt nroots,const PetscInt *selected,PetscSF *newsf)
{
  PetscInt       *rootdata, *leafdata, *ilocal;
  PetscSFNode    *iremote;
  PetscInt       leafsize = 0, nleaves = 0, n, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (nroots) PetscValidPointer(selected,3);
  PetscValidPointer(newsf,4);
  if (sf->mine) for (i = 0; i < sf->nleaves; ++i) {leafsize = PetscMax(leafsize, sf->mine[i]+1);}
  else leafsize = sf->nleaves;
  ierr = PetscCalloc2(sf->nroots,&rootdata,leafsize,&leafdata);CHKERRQ(ierr);
  for (i=0; i<nroots; ++i) rootdata[selected[i]] = 1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);

  for (i = 0; i < leafsize; ++i) nleaves += leafdata[i];
  ierr = PetscMalloc1(nleaves,&ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&iremote);CHKERRQ(ierr);
  for (i = 0, n = 0; i < sf->nleaves; ++i) {
    const PetscInt lidx = sf->mine ? sf->mine[i] : i;

    if (leafdata[lidx]) {
      ilocal[n]        = lidx;
      iremote[n].rank  = sf->remote[i].rank;
      iremote[n].index = sf->remote[i].index;
      ++n;
    }
  }
  if (n != nleaves) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "There is a size mismatch in the SF embedding, %D != %D", n, nleaves);
  ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,newsf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*newsf,sf->nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFBcastBegin"
/*@C
   PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()

   Collective on PetscSF

   Input Arguments:
+  sf - star forest on which to communicate
.  unit - data type associated with each node
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate
.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin()
@*/
PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_BcastBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->BcastBegin)(sf,unit,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_BcastBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFBcastEnd"
/*@C
   PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFReduceEnd()
@*/
PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_BcastEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->BcastEnd)(sf,unit,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_BcastEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFReduceBegin"
/*@C
   PetscSFReduceBegin - begin reduction of leafdata into rootdata, to be completed with call to PetscSFReduceEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - values to reduce
-  op - reduction operation

   Output Arguments:
.  rootdata - result of reduction of values from all leaves of each root

   Level: intermediate

.seealso: PetscSFBcastBegin()
@*/
PetscErrorCode PetscSFReduceBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_ReduceBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->ReduceBegin)(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_ReduceBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFReduceEnd"
/*@C
   PetscSFReduceEnd - end a reduction operation started with PetscSFReduceBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - values to reduce
-  op - reduction operation

   Output Arguments:
.  rootdata - result of reduction of values from all leaves of each root

   Level: intermediate
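   Example Usage:
   Summing the contribution of every leaf into its root (sketch; leafdata and rootdata are user arrays
   matching this SF's leaf and root spaces):
.vb
   ierr = PetscSFReduceBegin(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);CHKERRQ(ierr);
   ierr = PetscSFReduceEnd(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);CHKERRQ(ierr);
.ve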
.seealso: PetscSFSetGraph(), PetscSFBcastEnd()
@*/
PetscErrorCode PetscSFReduceEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_ReduceEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->ReduceEnd)(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_ReduceEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFComputeDegreeBegin"
/*@C
   PetscSFComputeDegreeBegin - begin computation of degree for each root vertex, to be completed with PetscSFComputeDegreeEnd()

   Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
.  degree - degree of each root vertex

   Level: advanced

.seealso: PetscSFGatherBegin()
@*/
PetscErrorCode PetscSFComputeDegreeBegin(PetscSF sf,const PetscInt **degree)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  PetscValidPointer(degree,2);
  if (!sf->degree) {
    PetscInt i,maxlocal;
    /* degreetmp holds leaf data, so it must span the full leaf range rather than the root space */
    for (i=0,maxlocal=0; i<sf->nleaves; i++) maxlocal = PetscMax(maxlocal,(sf->mine ? sf->mine[i] : i)+1);
    ierr = PetscMalloc1(sf->nroots,&sf->degree);CHKERRQ(ierr);
    ierr = PetscMalloc1(maxlocal,&sf->degreetmp);CHKERRQ(ierr);
    for (i=0; i<sf->nroots; i++) sf->degree[i] = 0;
    for (i=0; i<maxlocal; i++) sf->degreetmp[i] = 1;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,sf->degreetmp,sf->degree,MPIU_SUM);CHKERRQ(ierr);
  }
  *degree = NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFComputeDegreeEnd"
/*@C
   PetscSFComputeDegreeEnd - complete computation of degree for each root vertex, started with PetscSFComputeDegreeBegin()

   Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
.  degree - degree of each root vertex

   Level: developer

.seealso: PetscSFComputeDegreeBegin()
@*/
PetscErrorCode PetscSFComputeDegreeEnd(PetscSF sf,const PetscInt **degree)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  if (!sf->degreeknown) {
    ierr = PetscSFReduceEnd(sf,MPIU_INT,sf->degreetmp,sf->degree,MPIU_SUM);CHKERRQ(ierr);
    ierr = PetscFree(sf->degreetmp);CHKERRQ(ierr);

    sf->degreeknown = PETSC_TRUE;
  }
  *degree = sf->degree;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFFetchAndOpBegin"
/*@C
   PetscSFFetchAndOpBegin - begin operation that fetches values from root and updates atomically by applying operation using my leaf value, to be completed with PetscSFFetchAndOpEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - leaf values to use in reduction
-  op - operation to use for reduction

   Output Arguments:
+  rootdata - root values to be updated, input state is seen by first process to perform an update
-  leafupdate - state at each leaf's respective root immediately prior to my atomic update

   Level: advanced

   Note:
   The update is only atomic at the granularity provided by the hardware. Different roots referenced by the same process
   might be updated in a different order. Furthermore, if a composite type is used for the unit datatype, atomicity is
   not guaranteed across the whole vertex. Therefore, this function is mostly only used with primitive types such as
   integers.
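   Example Usage:
   Fetch-and-add is convenient for assigning each leaf a unique slot at its root, as done internally by
   PetscSFGetMultiSF() (sketch; rootoffset, leafones, and leafoffset are illustrative user arrays):
.vb
   ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,rootoffset,leafones,leafoffset,MPIU_SUM);CHKERRQ(ierr);
   ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,rootoffset,leafones,leafoffset,MPIU_SUM);CHKERRQ(ierr);
   /* leafoffset[] now holds, for each leaf, the root value immediately before this leaf's addition */
.ve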
.seealso: PetscSFComputeDegreeBegin(), PetscSFReduceBegin(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFFetchAndOpBegin(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_FetchAndOpBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->FetchAndOpBegin)(sf,unit,rootdata,leafdata,leafupdate,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_FetchAndOpBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFFetchAndOpEnd"
/*@C
   PetscSFFetchAndOpEnd - end operation started in matching call to PetscSFFetchAndOpBegin() to fetch values from roots and update atomically by applying operation using my leaf value

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - leaf values to use in reduction
-  op - operation to use for reduction

   Output Arguments:
+  rootdata - root values to be updated, input state is seen by first process to perform an update
-  leafupdate - state at each leaf's respective root immediately prior to my atomic update

   Level: advanced

.seealso: PetscSFComputeDegreeEnd(), PetscSFReduceEnd(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFFetchAndOpEnd(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscLogEventBegin(PETSCSF_FetchAndOpEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = (*sf->ops->FetchAndOpEnd)(sf,unit,rootdata,leafdata,leafupdate,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_FetchAndOpEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGatherBegin"
/*@C
   PetscSFGatherBegin - begin pointwise gather of all leaves into multi-roots, to be completed with PetscSFGatherEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  leafdata - leaf data to gather to roots

   Output Argument:
.  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

   Level: intermediate
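   Example Usage:
   The multi-root buffer is sized by the root degrees (sketch; nroots and leafdata are assumed to be
   known to the caller):
.vb
   const PetscInt *degree;
   PetscInt       i,nmulti = 0;
   PetscScalar    *multirootdata;
   ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
   ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
   for (i=0; i<nroots; i++) nmulti += degree[i];
   ierr = PetscMalloc1(nmulti,&multirootdata);CHKERRQ(ierr);
   ierr = PetscSFGatherBegin(sf,MPIU_SCALAR,leafdata,multirootdata);CHKERRQ(ierr);
   ierr = PetscSFGatherEnd(sf,MPIU_SCALAR,leafdata,multirootdata);CHKERRQ(ierr);
.ve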
.seealso: PetscSFComputeDegreeBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFGatherBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFGatherEnd"
/*@C
   PetscSFGatherEnd - ends pointwise gather operation that was started with PetscSFGatherBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  leafdata - leaf data to gather to roots

   Output Argument:
.  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

   Level: intermediate

.seealso: PetscSFComputeDegreeEnd(), PetscSFScatterEnd()
@*/
PetscErrorCode PetscSFGatherEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFScatterBegin"
/*@C
   PetscSFScatterBegin - begin pointwise scatter operation from multi-roots to leaves, to be completed with PetscSFScatterEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  multirootdata - root buffer to send to each leaf, one unit of data per leaf

   Output Argument:
.  leafdata - leaf data to be updated with personal data from each respective root

   Level: intermediate

.seealso: PetscSFComputeDegreeBegin(), PetscSFScatterEnd()
@*/
PetscErrorCode PetscSFScatterBegin(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(multi,unit,multirootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSFScatterEnd"
/*@C
   PetscSFScatterEnd - ends pointwise scatter operation that was started with PetscSFScatterBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  multirootdata - root buffer to send to each leaf, one unit of data per leaf

   Output Argument:
.  leafdata - leaf data to be updated with personal data from each respective root

   Level: intermediate
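   Example Usage:
   Sending one unit per (root,leaf) edge back to the leaves (sketch; multirootdata is laid out as
   described in PetscSFGatherBegin()):
.vb
   ierr = PetscSFScatterBegin(sf,MPIU_SCALAR,multirootdata,leafdata);CHKERRQ(ierr);
   ierr = PetscSFScatterEnd(sf,MPIU_SCALAR,multirootdata,leafdata);CHKERRQ(ierr);
.ve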
.seealso: PetscSFComputeDegreeEnd(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFScatterEnd(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(multi,unit,multirootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}