#include <../src/vec/is/sf/impls/basic/allgatherv/sfallgatherv.h>

PETSC_INTERN PetscErrorCode PetscSFBcastBegin_Gatherv(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void*,MPI_Op);

/* PetscSFGetGraph is non-collective. An implementation should not have collective calls */
PETSC_INTERN PetscErrorCode PetscSFGetGraph_Allgatherv(PetscSF sf,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote)
{
  PetscErrorCode ierr;
  PetscInt       i,j,k;
  const PetscInt *range;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);CHKERRMPI(ierr);
  if (nroots)  *nroots  = sf->nroots;
  if (nleaves) *nleaves = sf->nleaves;
  if (ilocal)  *ilocal  = NULL; /* Contiguous leaves */
  if (iremote) {
    if (!sf->remote && sf->nleaves) { /* The && sf->nleaves makes sfgatherv able to inherit this routine */
      ierr = PetscLayoutGetRanges(sf->map,&range);CHKERRQ(ierr);
      ierr = PetscMalloc1(sf->nleaves,&sf->remote);CHKERRQ(ierr);
      sf->remote_alloc = sf->remote;
      for (i=0; i<size; i++) {
        for (j=range[i],k=0; j<range[i+1]; j++,k++) {
          sf->remote[j].rank  = i;
          sf->remote[j].index = k;
        }
      }
    }
    *iremote = sf->remote;
  }
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode PetscSFSetUp_Allgatherv(PetscSF sf)
{
  PetscErrorCode     ierr;
  PetscSF_Allgatherv *dat = (PetscSF_Allgatherv*)sf->data;
  PetscMPIInt        size;
  PetscInt           i;
  const PetscInt     *range;

  PetscFunctionBegin;
  ierr = PetscSFSetUp_Allgather(sf);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);CHKERRMPI(ierr);
  if (sf->nleaves) { /* This if (sf->nleaves) test makes sfgatherv able to inherit this routine */
    ierr = PetscMalloc1(size,&dat->recvcounts);CHKERRQ(ierr);
    ierr = PetscMalloc1(size,&dat->displs);CHKERRQ(ierr);
    ierr = PetscLayoutGetRanges(sf->map,&range);CHKERRQ(ierr);

    for (i=0; i<size; i++) {
      ierr = PetscMPIIntCast(range[i],&dat->displs[i]);CHKERRQ(ierr);
      ierr = PetscMPIIntCast(range[i+1]-range[i],&dat->recvcounts[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
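
/* Illustrative note (not part of the implementation): a small worked example of what PetscSFSetUp_Allgatherv
   computes. Assume a hypothetical layout over 3 ranks owning 3, 2 and 4 roots, i.e. range = {0,3,5,9}. The
   loop above then yields

     recvcounts = {3, 2, 4}   from range[i+1]-range[i]
     displs     = {0, 3, 5}   from range[i]

   which are exactly the per-rank counts and displacements later handed to MPIU_Iallgatherv (in BcastBegin)
   and MPIU_Iscatterv (in ReduceBegin). */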

PETSC_INTERN PetscErrorCode PetscSFReset_Allgatherv(PetscSF sf)
{
  PetscErrorCode     ierr;
  PetscSF_Allgatherv *dat = (PetscSF_Allgatherv*)sf->data;
  PetscSFLink        link = dat->avail,next;

  PetscFunctionBegin;
  ierr = PetscFree(dat->iranks);CHKERRQ(ierr);
  ierr = PetscFree(dat->ioffset);CHKERRQ(ierr);
  ierr = PetscFree(dat->irootloc);CHKERRQ(ierr);
  ierr = PetscFree(dat->recvcounts);CHKERRQ(ierr);
  ierr = PetscFree(dat->displs);CHKERRQ(ierr);
  if (dat->inuse) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Outstanding operation has not been completed");
  for (; link; link=next) {next = link->next; ierr = PetscSFLinkDestroy(sf,link);CHKERRQ(ierr);}
  dat->avail = NULL;
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode PetscSFDestroy_Allgatherv(PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFReset_Allgatherv(sf);CHKERRQ(ierr);
  ierr = PetscFree(sf->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscSFBcastBegin_Allgatherv(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void *rootdata,PetscMemType leafmtype,void *leafdata,MPI_Op op)
{
  PetscErrorCode     ierr;
  PetscSFLink        link;
  PetscMPIInt        sendcount;
  MPI_Comm           comm;
  void               *rootbuf = NULL,*leafbuf = NULL;
  MPI_Request        *req;
  PetscSF_Allgatherv *dat = (PetscSF_Allgatherv*)sf->data;

  PetscFunctionBegin;
  ierr = PetscSFLinkCreate(sf,unit,rootmtype,rootdata,leafmtype,leafdata,op,PETSCSF_BCAST,&link);CHKERRQ(ierr);
  ierr = PetscSFLinkPackRootData(sf,link,PETSCSF_REMOTE,rootdata);CHKERRQ(ierr);
  ierr = PetscSFLinkCopyRootBufferInCaseNotUseGpuAwareMPI(sf,link,PETSC_TRUE/* device2host before sending */);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(sf->nroots,&sendcount);CHKERRQ(ierr);
  ierr = PetscSFLinkGetMPIBuffersAndRequests(sf,link,PETSCSF_ROOT2LEAF,&rootbuf,&leafbuf,&req,NULL);CHKERRQ(ierr);
  ierr = PetscSFLinkSyncStreamBeforeCallMPI(sf,link,PETSCSF_ROOT2LEAF);CHKERRQ(ierr);
  ierr = MPIU_Iallgatherv(rootbuf,sendcount,unit,leafbuf,dat->recvcounts,dat->displs,unit,comm,req);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
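
/* Illustrative sketch of caller-side usage (an assumption, not code from this file): on an allgatherv SF,
   a broadcast gives every process a copy of all roots, e.g.

     PetscScalar *rootdata,*leafdata;  // leafdata must hold sf->nleaves entries
     ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE);CHKERRQ(ierr);

   PetscSFBcastBegin_Allgatherv above posts the MPIU_Iallgatherv; the matching end is PetscSFBcastEnd_Basic,
   installed in PetscSFCreate_Allgatherv below, which waits on the request. */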

static PetscErrorCode PetscSFReduceBegin_Allgatherv(PetscSF sf,MPI_Datatype unit,PetscMemType leafmtype,const void *leafdata,PetscMemType rootmtype,void *rootdata,MPI_Op op)
{
  PetscErrorCode     ierr;
  PetscSFLink        link;
  PetscSF_Allgatherv *dat = (PetscSF_Allgatherv*)sf->data;
  PetscInt           rstart;
  PetscMPIInt        rank,count,recvcount;
  MPI_Comm           comm;
  void               *rootbuf = NULL,*leafbuf = NULL;
  MPI_Request        *req;

  PetscFunctionBegin;
  ierr = PetscSFLinkCreate(sf,unit,rootmtype,rootdata,leafmtype,leafdata,op,PETSCSF_REDUCE,&link);CHKERRQ(ierr);
  if (op == MPI_REPLACE) {
    /* REPLACE is only meaningful when all processes have the same leafdata to reduce. Therefore copying from local leafdata is fine */
    ierr = PetscLayoutGetRange(sf->map,&rstart,NULL);CHKERRQ(ierr);
    ierr = (*link->Memcpy)(link,rootmtype,rootdata,leafmtype,(const char*)leafdata+(size_t)rstart*link->unitbytes,(size_t)sf->nroots*link->unitbytes);CHKERRQ(ierr);
    if (PetscMemTypeDevice(leafmtype) && PetscMemTypeHost(rootmtype)) {ierr = (*link->SyncStream)(link);CHKERRQ(ierr);}
  } else {
    /* Reduce leafdata, then scatter to rootdata */
    ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    ierr = PetscSFLinkPackLeafData(sf,link,PETSCSF_REMOTE,leafdata);CHKERRQ(ierr);
    ierr = PetscSFLinkCopyLeafBufferInCaseNotUseGpuAwareMPI(sf,link,PETSC_TRUE/* device2host before sending */);CHKERRQ(ierr);
    ierr = PetscSFLinkGetMPIBuffersAndRequests(sf,link,PETSCSF_LEAF2ROOT,&rootbuf,&leafbuf,&req,NULL);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(dat->rootbuflen[PETSCSF_REMOTE],&recvcount);CHKERRQ(ierr);
    /* Allocate a separate leaf buffer on rank 0 */
    if (!rank && !link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi]) {
      ierr = PetscSFMalloc(sf,link->leafmtype_mpi,sf->leafbuflen[PETSCSF_REMOTE]*link->unitbytes,(void**)&link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi]);CHKERRQ(ierr);
    }
    /* In case we already copied leafdata from device to host (i.e., no use_gpu_aware_mpi), we need to adjust leafbuf on rank 0 */
    if (!rank && link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi] == leafbuf) leafbuf = MPI_IN_PLACE;
    ierr = PetscMPIIntCast(sf->nleaves*link->bs,&count);CHKERRQ(ierr);
    ierr = PetscSFLinkSyncStreamBeforeCallMPI(sf,link,PETSCSF_LEAF2ROOT);CHKERRQ(ierr);
    ierr = MPI_Reduce(leafbuf,link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi],count,link->basicunit,op,0,comm);CHKERRMPI(ierr); /* Must do the reduce with the MPI built-in datatype basicunit */
    ierr = MPIU_Iscatterv(link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi],dat->recvcounts,dat->displs,unit,rootbuf,recvcount,unit,0,comm,req);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscSFBcastToZero_Allgatherv(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void *rootdata,PetscMemType leafmtype,void *leafdata)
{
  PetscErrorCode ierr;
  PetscSFLink    link;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = PetscSFBcastBegin_Gatherv(sf,unit,rootmtype,rootdata,leafmtype,leafdata,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFLinkGetInUse(sf,unit,rootdata,leafdata,PETSC_OWN_POINTER,&link);CHKERRQ(ierr);
  ierr = PetscSFLinkFinishCommunication(sf,link,PETSCSF_ROOT2LEAF);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRMPI(ierr);
  if (!rank && PetscMemTypeDevice(leafmtype) && !sf->use_gpu_aware_mpi) {
    ierr = (*link->Memcpy)(link,PETSC_MEMTYPE_DEVICE,leafdata,PETSC_MEMTYPE_HOST,link->leafbuf[PETSC_MEMTYPE_HOST],sf->leafbuflen[PETSCSF_REMOTE]*link->unitbytes);CHKERRQ(ierr);
  }
  ierr = PetscSFLinkReclaim(sf,&link);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
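
/* Illustrative note (hypothetical numbers, not part of the implementation): how the non-REPLACE branch of
   PetscSFReduceBegin_Allgatherv above behaves on 2 ranks with 4 roots laid out as {0,2,4} and 4 leaves per rank.
   With op=MPI_SUM and

     leafdata on rank 0 = {1,2,3,4},   leafdata on rank 1 = {10,20,30,40}

   the MPI_Reduce forms {11,22,33,44} on rank 0, and the MPIU_Iscatterv with recvcounts = {2,2}, displs = {0,2}
   delivers {11,22} to rank 0's roots and {33,44} to rank 1's roots. */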

/* This routine is very tricky (I believe it is rarely used with this kind of graph, so we just provide a simple but non-optimal implementation).

   Suppose we have three ranks. Rank 0 has a root with value 1. Ranks 0, 1 and 2 have a leaf with value 2, 3 and 4 respectively. The leaves are connected
   to the root on rank 0. Suppose op=MPI_SUM and ranks 0, 1 and 2 get the root state in their rank order. By definition of this routine, rank 0 sees 1
   in root, fetches it into its leafupdate, then updates root to 1 + 2 = 3; rank 1 sees 3 in root, fetches it into its leafupdate, then updates
   root to 3 + 3 = 6; rank 2 sees 6 in root, fetches it into its leafupdate, then updates root to 6 + 4 = 10. At the end, leafupdate on ranks
   0, 1 and 2 is 1, 3 and 6 respectively. root is 10.

   We use a simpler implementation. From the same initial state, we copy leafdata to leafupdate
                rank-0   rank-1   rank-2
        Root        1
        Leaf        2        3        4
  Leafupdate        2        3        4

   Do MPI_Exscan on leafupdate,
                rank-0   rank-1   rank-2
        Root        1
        Leaf        2        3        4
  Leafupdate        2        2        5

   BcastAndOp from root to leafupdate,
                rank-0   rank-1   rank-2
        Root        1
        Leaf        2        3        4
  Leafupdate        3        3        6

   Copy root to leafupdate on rank-0
                rank-0   rank-1   rank-2
        Root        1
        Leaf        2        3        4
  Leafupdate        1        3        6

   Reduce from leaf to root,
                rank-0   rank-1   rank-2
        Root       10
        Leaf        2        3        4
  Leafupdate        1        3        6
*/
PETSC_INTERN PetscErrorCode PetscSFFetchAndOpBegin_Allgatherv(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,void *rootdata,PetscMemType leafmtype,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;
  PetscSFLink    link;
  MPI_Comm       comm;
  PetscMPIInt    count;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  if (PetscMemTypeDevice(rootmtype) || PetscMemTypeDevice(leafmtype)) SETERRQ(comm,PETSC_ERR_SUP,"Do FetchAndOp on device");
  /* Copy leafdata to leafupdate */
  ierr = PetscSFLinkCreate(sf,unit,rootmtype,rootdata,leafmtype,leafdata,op,PETSCSF_FETCH,&link);CHKERRQ(ierr);
  ierr = PetscSFLinkPackLeafData(sf,link,PETSCSF_REMOTE,leafdata);CHKERRQ(ierr); /* Sync the device */
  ierr = (*link->Memcpy)(link,leafmtype,leafupdate,leafmtype,leafdata,sf->nleaves*link->unitbytes);CHKERRQ(ierr);
  ierr = PetscSFLinkGetInUse(sf,unit,rootdata,leafdata,PETSC_OWN_POINTER,&link);CHKERRQ(ierr);

  /* Exscan on leafupdate and then BcastAndOp rootdata to leafupdate */
  if (op == MPI_REPLACE) {
    PetscMPIInt size,rank,prev,next;
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    prev = rank            ? rank-1 : MPI_PROC_NULL;
    next = (rank < size-1) ? rank+1 : MPI_PROC_NULL;
    ierr = PetscMPIIntCast(sf->nleaves,&count);CHKERRQ(ierr);
    ierr = MPI_Sendrecv_replace(leafupdate,count,unit,next,link->tag,prev,link->tag,comm,MPI_STATUS_IGNORE);CHKERRMPI(ierr);
  } else {
    ierr = PetscMPIIntCast(sf->nleaves*link->bs,&count);CHKERRQ(ierr);
    ierr = MPI_Exscan(MPI_IN_PLACE,leafupdate,count,link->basicunit,op,comm);CHKERRMPI(ierr);
  }
  ierr = PetscSFLinkReclaim(sf,&link);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,unit,rootdata,leafupdate,op);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,unit,rootdata,leafupdate,op);CHKERRQ(ierr);

  /* Bcast roots to rank 0's leafupdate */
  ierr = PetscSFBcastToZero_Private(sf,unit,rootdata,leafupdate);CHKERRQ(ierr); /* Using this line makes Allgather SFs able to inherit this routine */

  /* Reduce leafdata to rootdata */
  ierr = PetscSFReduceBegin(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode PetscSFFetchAndOpEnd_Allgatherv(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFReduceEnd(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
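
/* Illustrative sketch of caller-side usage (an assumption, not code from this file): the fetch-and-op pair
   above is driven through the generic interface, e.g.

     ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);CHKERRQ(ierr);
     ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);CHKERRQ(ierr);

   after which leafupdate holds the root value each process saw before its own contribution (in some ordering)
   and rootdata holds the fully reduced result, matching the 3-rank example in the comment before
   PetscSFFetchAndOpBegin_Allgatherv. */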

/* Get root ranks accessing my leaves */
PETSC_INTERN PetscErrorCode PetscSFGetRootRanks_Allgatherv(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote)
{
  PetscErrorCode ierr;
  PetscInt       i,j,k,size;
  const PetscInt *range;

  PetscFunctionBegin;
  /* Lazily construct these large arrays if users really need them for this type of SF. Very likely, they do not */
  if (sf->nranks && !sf->ranks) { /* On rank!=0, sf->nranks=0. The sf->nranks test makes this routine also work for sfgatherv */
    size = sf->nranks;
    ierr = PetscLayoutGetRanges(sf->map,&range);CHKERRQ(ierr);
    ierr = PetscMalloc4(size,&sf->ranks,size+1,&sf->roffset,sf->nleaves,&sf->rmine,sf->nleaves,&sf->rremote);CHKERRQ(ierr);
    for (i=0; i<size; i++) sf->ranks[i] = i;
    ierr = PetscArraycpy(sf->roffset,range,size+1);CHKERRQ(ierr);
    for (i=0; i<sf->nleaves; i++) sf->rmine[i] = i; /* rmine are never NULL even for contiguous leaves */
    for (i=0; i<size; i++) {
      for (j=range[i],k=0; j<range[i+1]; j++,k++) sf->rremote[j] = k;
    }
  }

  if (nranks)  *nranks  = sf->nranks;
  if (ranks)   *ranks   = sf->ranks;
  if (roffset) *roffset = sf->roffset;
  if (rmine)   *rmine   = sf->rmine;
  if (rremote) *rremote = sf->rremote;
  PetscFunctionReturn(0);
}

/* Get leaf ranks accessing my roots */
PETSC_INTERN PetscErrorCode PetscSFGetLeafRanks_Allgatherv(PetscSF sf,PetscInt *niranks,const PetscMPIInt **iranks,const PetscInt **ioffset,const PetscInt **irootloc)
{
  PetscErrorCode     ierr;
  PetscSF_Allgatherv *dat = (PetscSF_Allgatherv*)sf->data;
  MPI_Comm           comm;
  PetscMPIInt        size,rank;
  PetscInt           i,j;

  PetscFunctionBegin;
  /* Lazily construct these large arrays if users really need them for this type of SF. Very likely, they do not */
  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (niranks) *niranks = size;

  /* PetscSF_Basic has distinguished incoming ranks. Here we do not need that, but we must put self first and
     sort the other ranks. See comments in PetscSFSetUp_Basic about MatGetBrowsOfAoCols_MPIAIJ on why.
   */
  if (iranks) {
    if (!dat->iranks) {
      ierr = PetscMalloc1(size,&dat->iranks);CHKERRQ(ierr);
      dat->iranks[0] = rank;
      for (i=0,j=1; i<size; i++) {if (i == rank) continue; dat->iranks[j++] = i;}
    }
    *iranks = dat->iranks; /* dat->iranks was initialized to NULL by PetscNewLog */
  }

  if (ioffset) {
    if (!dat->ioffset) {
      ierr = PetscMalloc1(size+1,&dat->ioffset);CHKERRQ(ierr);
      for (i=0; i<=size; i++) dat->ioffset[i] = i*sf->nroots;
    }
    *ioffset = dat->ioffset;
  }

  if (irootloc) {
    if (!dat->irootloc) {
      ierr = PetscMalloc1(sf->nleaves,&dat->irootloc);CHKERRQ(ierr);
      for (i=0; i<size; i++) {
        for (j=0; j<sf->nroots; j++) dat->irootloc[i*sf->nroots+j] = j;
      }
    }
    *irootloc = dat->irootloc;
  }
  PetscFunctionReturn(0);
}
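
/* Illustrative note (hypothetical sizes, not part of the implementation): for size=3, rank=1 and sf->nroots=2,
   the lazily built arrays above are

     iranks   = {1, 0, 2}         self first, the remaining ranks in increasing order
     ioffset  = {0, 2, 4, 6}      every rank accesses all nroots roots
     irootloc = {0,1, 0,1, 0,1}   root indices repeated once per rank
*/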

PETSC_INTERN PetscErrorCode PetscSFCreateLocalSF_Allgatherv(PetscSF sf,PetscSF *out)
{
  PetscInt       i,nroots,nleaves,rstart,*ilocal;
  PetscSFNode    *iremote;
  PetscSF        lsf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  nleaves = sf->nleaves ? sf->nroots : 0; /* sf->nleaves can be zero with SFGather(v) */
  nroots  = nleaves;
  ierr = PetscMalloc1(nleaves,&ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&iremote);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(sf->map,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nleaves; i++) {
    ilocal[i]        = rstart + i; /* lsf does not change leaf indices */
    iremote[i].rank  = 0;          /* rank in PETSC_COMM_SELF */
    iremote[i].index = i;          /* root index */
  }

  ierr = PetscSFCreate(PETSC_COMM_SELF,&lsf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(lsf,nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetUp(lsf);CHKERRQ(ierr);
  *out = lsf;
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode PetscSFCreate_Allgatherv(PetscSF sf)
{
  PetscErrorCode     ierr;
  PetscSF_Allgatherv *dat = (PetscSF_Allgatherv*)sf->data;

  PetscFunctionBegin;
  sf->ops->BcastEnd        = PetscSFBcastEnd_Basic;
  sf->ops->ReduceEnd       = PetscSFReduceEnd_Basic;

  sf->ops->SetUp           = PetscSFSetUp_Allgatherv;
  sf->ops->Reset           = PetscSFReset_Allgatherv;
  sf->ops->Destroy         = PetscSFDestroy_Allgatherv;
  sf->ops->GetRootRanks    = PetscSFGetRootRanks_Allgatherv;
  sf->ops->GetLeafRanks    = PetscSFGetLeafRanks_Allgatherv;
  sf->ops->GetGraph        = PetscSFGetGraph_Allgatherv;
  sf->ops->BcastBegin      = PetscSFBcastBegin_Allgatherv;
  sf->ops->ReduceBegin     = PetscSFReduceBegin_Allgatherv;
  sf->ops->FetchAndOpBegin = PetscSFFetchAndOpBegin_Allgatherv;
  sf->ops->FetchAndOpEnd   = PetscSFFetchAndOpEnd_Allgatherv;
  sf->ops->CreateLocalSF   = PetscSFCreateLocalSF_Allgatherv;
  sf->ops->BcastToZero     = PetscSFBcastToZero_Allgatherv;

  ierr = PetscNewLog(sf,&dat);CHKERRQ(ierr);
  sf->data = (void*)dat;
  PetscFunctionReturn(0);
}
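
/* Illustrative sketch of how an SF of this type is typically obtained (a usage assumption, not code from this
   file; it presumes PetscSFSetGraphWithPattern() and PETSCSF_PATTERN_ALLGATHER are available in this PETSc
   version):

     PetscSF     sf;
     PetscLayout map;   // parallel layout of the roots
     ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
     ierr = PetscSFSetGraphWithPattern(sf,map,PETSCSF_PATTERN_ALLGATHER);CHKERRQ(ierr);
     ierr = PetscSFSetUp(sf);CHKERRQ(ierr);

   after which the Bcast/Reduce/FetchAndOp callbacks installed in PetscSFCreate_Allgatherv above are used. */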