xref: /petsc/src/vec/is/sf/impls/basic/allgather/sfallgather.c (revision 6f3d89d0b697a3d79aa40878724d2c791c95edac)
1 #include <../src/vec/is/sf/impls/basic/allgatherv/sfallgatherv.h>
2 
3 /* Reuse the type. The difference is some fields (i.e., displs, recvcounts) are not used in Allgather on rank != 0, which is not a big deal */
4 typedef PetscSF_Allgatherv PetscSF_Allgather;
5 
6 PETSC_INTERN PetscErrorCode PetscSFBcastAndOpBegin_Gather(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void*,MPI_Op);
7 
8 PetscErrorCode PetscSFSetUp_Allgather(PetscSF sf)
9 {
10   PetscInt              i;
11   PetscSF_Allgather     *dat = (PetscSF_Allgather*)sf->data;
12 
13   PetscFunctionBegin;
14   for (i=PETSCSF_LOCAL; i<=PETSCSF_REMOTE; i++) {
15     sf->leafbuflen[i]  = 0;
16     sf->leafstart[i]   = 0;
17     sf->leafcontig[i]  = PETSC_TRUE;
18     sf->leafdups[i]    = PETSC_FALSE;
19     dat->rootbuflen[i] = 0;
20     dat->rootstart[i]  = 0;
21     dat->rootcontig[i] = PETSC_TRUE;
22     dat->rootdups[i]   = PETSC_FALSE;
23   }
24 
25   sf->leafbuflen[PETSCSF_REMOTE]  = sf->nleaves;
26   dat->rootbuflen[PETSCSF_REMOTE] = sf->nroots;
27   sf->persistent = PETSC_FALSE;
28   sf->nleafreqs  = 0; /* MPI collectives only need one request. We treat it as a root request. */
29   dat->nrootreqs = 1;
30   PetscFunctionReturn(0);
31 }
32 
33 static PetscErrorCode PetscSFBcastAndOpBegin_Allgather(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void *rootdata,PetscMemType leafmtype,void *leafdata,MPI_Op op)
34 {
35   PetscErrorCode        ierr;
36   PetscSFLink           link;
37   PetscMPIInt           sendcount;
38   MPI_Comm              comm;
39   void                  *rootbuf = NULL,*leafbuf = NULL; /* buffer seen by MPI */
40   MPI_Request           *req;
41 
42   PetscFunctionBegin;
43   ierr = PetscSFLinkCreate(sf,unit,rootmtype,rootdata,leafmtype,leafdata,op,PETSCSF_BCAST,&link);CHKERRQ(ierr);
44   ierr = PetscSFLinkPackRootData(sf,link,PETSCSF_REMOTE,rootdata);CHKERRQ(ierr);
45   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
46   ierr = PetscMPIIntCast(sf->nroots,&sendcount);CHKERRQ(ierr);
47   ierr = PetscSFLinkGetMPIBuffersAndRequests(sf,link,PETSCSF_ROOT2LEAF,&rootbuf,&leafbuf,&req,NULL);CHKERRQ(ierr);
48   ierr = MPIU_Iallgather(rootbuf,sendcount,unit,leafbuf,sendcount,unit,comm,req);CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
51 
/* Start a reduction (leaf-to-root). Two paths:
   - MPIU_REPLACE: each rank just copies its own slice of leafdata into rootdata
     (meaningful only when all ranks hold identical leafdata).
   - general op: reduce all ranks' leaf buffers onto a scratch buffer at rank 0
     with MPI_Reduce, then start a nonblocking MPIU_Iscatter to deliver each
     rank its root segment; the scatter is completed in ReduceEnd. */
static PetscErrorCode PetscSFReduceBegin_Allgather(PetscSF sf,MPI_Datatype unit,PetscMemType leafmtype,const void *leafdata,PetscMemType rootmtype,void *rootdata,MPI_Op op)
{
  PetscErrorCode        ierr;
  PetscSFLink           link;
  PetscInt              rstart;
  MPI_Comm              comm;
  PetscMPIInt           rank,count,recvcount;
  void                  *rootbuf = NULL,*leafbuf = NULL; /* buffer seen by MPI */
  PetscSF_Allgather     *dat = (PetscSF_Allgather*)sf->data;
  MPI_Request           *req;

  PetscFunctionBegin;
  ierr = PetscSFLinkCreate(sf,unit,rootmtype,rootdata,leafmtype,leafdata,op,PETSCSF_REDUCE,&link);CHKERRQ(ierr);
  if (op == MPIU_REPLACE) {
    /* REPLACE is only meaningful when all processes have the same leafdata to reduce. Therefore copy from local leafdata is fine */
    ierr = PetscLayoutGetRange(sf->map,&rstart,NULL);CHKERRQ(ierr); /* this rank's offset into the global (concatenated) leaf space */
    ierr = (*link->Memcpy)(link,rootmtype,rootdata,leafmtype,(const char*)leafdata+(size_t)rstart*link->unitbytes,(size_t)sf->nroots*link->unitbytes);CHKERRQ(ierr);
#if defined(PETSC_HAVE_DEVICE)
    /* device-to-host copy may be asynchronous; make sure rootdata is ready before returning */
    if (PetscMemTypeHost(rootmtype)  && PetscMemTypeDevice(leafmtype)) {ierr = (*link->d_SyncStream)(link);CHKERRQ(ierr);}
#endif
  } else {
    ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    ierr = PetscSFLinkPackLeafData(sf,link,PETSCSF_REMOTE,leafdata);CHKERRQ(ierr);
    ierr = PetscSFLinkGetMPIBuffersAndRequests(sf,link,PETSCSF_LEAF2ROOT,&rootbuf,&leafbuf,&req,NULL);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(dat->rootbuflen[PETSCSF_REMOTE],&recvcount);CHKERRQ(ierr);
    /* Rank 0 needs a scratch buffer to receive the MPI_Reduce result; allocate it lazily on first use */
    if (!rank && !link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi]) {
      ierr = PetscSFMalloc(sf,link->leafmtype_mpi,sf->leafbuflen[PETSCSF_REMOTE]*link->unitbytes,(void**)&link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi]);CHKERRQ(ierr);
    }
    /* If rank 0's send buffer IS the scratch buffer, MPI requires MPI_IN_PLACE at the root */
    if (!rank && link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi] == leafbuf) leafbuf = MPI_IN_PLACE;
    ierr = PetscMPIIntCast(sf->nleaves*link->bs,&count);CHKERRQ(ierr);
    /* Reduce in basicunit elements (count = nleaves * block size); recvbuf is only significant at root 0 */
    ierr = MPI_Reduce(leafbuf,link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi],count,link->basicunit,op,0,comm);CHKERRMPI(ierr);
    /* Nonblocking scatter of the reduced result: each rank receives its recvcount root entries */
    ierr = MPIU_Iscatter(link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi],recvcount,unit,rootbuf,recvcount,unit,0/*rank 0*/,comm,req);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
88 
/* Broadcast root data to the leaves on rank 0 only. Implemented by starting a
   Gather-flavored bcast, waiting for it to complete, and (if needed) copying the
   host-staged result back to device memory on rank 0. Blocking: the operation is
   finished when this routine returns. */
static PetscErrorCode PetscSFBcastToZero_Allgather(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void *rootdata,PetscMemType leafmtype,void *leafdata)
{
  PetscErrorCode        ierr;
  PetscSFLink           link;
  PetscMPIInt           rank;

  PetscFunctionBegin;
  ierr = PetscSFBcastAndOpBegin_Gather(sf,unit,rootmtype,rootdata,leafmtype,leafdata,MPIU_REPLACE);CHKERRQ(ierr);
  /* Recover the link created by the Begin call so we can wait on its request(s) */
  ierr = PetscSFLinkGetInUse(sf,unit,rootdata,leafdata,PETSC_OWN_POINTER,&link);CHKERRQ(ierr);
  ierr = PetscSFLinkMPIWaitall(sf,link,PETSCSF_ROOT2LEAF);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRMPI(ierr);
  /* Without GPU-aware MPI the gather landed in a host buffer; rank 0 must copy it up to device leafdata */
  if (!rank && leafmtype == PETSC_MEMTYPE_DEVICE && !sf->use_gpu_aware_mpi) {
    ierr = (*link->Memcpy)(link,PETSC_MEMTYPE_DEVICE,leafdata,PETSC_MEMTYPE_HOST,link->leafbuf[PETSCSF_REMOTE][PETSC_MEMTYPE_HOST],sf->leafbuflen[PETSCSF_REMOTE]*link->unitbytes);CHKERRQ(ierr);
  }
  ierr = PetscSFLinkReclaim(sf,&link);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
106 
107 PETSC_INTERN PetscErrorCode PetscSFCreate_Allgather(PetscSF sf)
108 {
109   PetscErrorCode    ierr;
110   PetscSF_Allgather *dat = (PetscSF_Allgather*)sf->data;
111 
112   PetscFunctionBegin;
113   sf->ops->BcastAndOpEnd   = PetscSFBcastAndOpEnd_Basic;
114   sf->ops->ReduceEnd       = PetscSFReduceEnd_Basic;
115 
116   /* Inherit from Allgatherv */
117   sf->ops->Reset           = PetscSFReset_Allgatherv;
118   sf->ops->Destroy         = PetscSFDestroy_Allgatherv;
119   sf->ops->FetchAndOpBegin = PetscSFFetchAndOpBegin_Allgatherv;
120   sf->ops->FetchAndOpEnd   = PetscSFFetchAndOpEnd_Allgatherv;
121   sf->ops->GetRootRanks    = PetscSFGetRootRanks_Allgatherv;
122   sf->ops->CreateLocalSF   = PetscSFCreateLocalSF_Allgatherv;
123   sf->ops->GetGraph        = PetscSFGetGraph_Allgatherv;
124   sf->ops->GetLeafRanks    = PetscSFGetLeafRanks_Allgatherv;
125 
126   /* Allgather stuff */
127   sf->ops->SetUp           = PetscSFSetUp_Allgather;
128   sf->ops->BcastAndOpBegin = PetscSFBcastAndOpBegin_Allgather;
129   sf->ops->ReduceBegin     = PetscSFReduceBegin_Allgather;
130   sf->ops->BcastToZero     = PetscSFBcastToZero_Allgather;
131 
132   ierr = PetscNewLog(sf,&dat);CHKERRQ(ierr);
133   sf->data = (void*)dat;
134   PetscFunctionReturn(0);
135 }
136