xref: /petsc/src/vec/is/sf/impls/basic/allgather/sfallgather.c (revision efbe7e8a80d07327753dbe0b33efee01e046af3f)
#include <../src/vec/is/sf/impls/basic/allgatherv/sfallgatherv.h>

/* Reuse the type. The difference is that some fields (i.e., displs, recvcounts) are not used in Allgather, which is not a big deal */
typedef PetscSF_Allgatherv PetscSF_Allgather;
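
/* Why displs/recvcounts go unused here (illustration only, not used by this file):
   MPI_Allgather takes a single uniform recvcount, whereas MPI_Allgatherv needs the
   per-rank arrays that PetscSF_Allgatherv carries:

     MPI_Allgather (const void *sendbuf,int sendcount,MPI_Datatype sendtype,
                    void *recvbuf,int recvcount,MPI_Datatype recvtype,MPI_Comm comm);
     MPI_Allgatherv(const void *sendbuf,int sendcount,MPI_Datatype sendtype,
                    void *recvbuf,const int recvcounts[],const int displs[],
                    MPI_Datatype recvtype,MPI_Comm comm);
*/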

PETSC_INTERN PetscErrorCode PetscSFBcastAndOpBegin_Gather(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void*,MPI_Op);

PetscErrorCode PetscSFSetUp_Allgather(PetscSF sf)
{
  PetscInt              i;
  PetscSF_Allgather     *dat = (PetscSF_Allgather*)sf->data;

  PetscFunctionBegin;
  for (i=PETSCSF_LOCAL; i<=PETSCSF_REMOTE; i++) {
    sf->leafbuflen[i]  = 0;
    sf->leafstart[i]   = 0;
    sf->leafcontig[i]  = PETSC_TRUE;
    sf->leafdups[i]    = PETSC_FALSE;
    dat->rootbuflen[i] = 0;
    dat->rootstart[i]  = 0;
    dat->rootcontig[i] = PETSC_TRUE;
    dat->rootdups[i]   = PETSC_FALSE;
  }

  sf->leafbuflen[PETSCSF_REMOTE]  = sf->nleaves;
  dat->rootbuflen[PETSCSF_REMOTE] = sf->nroots;
  sf->persistent = PETSC_FALSE;
  sf->nleafreqs  = 0; /* MPI collectives only need one request. We treat it as a root request. */
  dat->nrootreqs = 1;
  PetscFunctionReturn(0);
}
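
/* Illustration (hedged sketch, not used by this file): in the allgather pattern every
   process owns sf->nroots contiguous roots and sees all roots as its leaves. E.g.,
   with 3 ranks and 2 roots per rank:

     rank 0 roots: {0,1}   rank 1 roots: {2,3}   rank 2 roots: {4,5}
     every rank's leaves:  {0,1,2,3,4,5}   (nleaves = 6)

   hence leafbuflen[PETSCSF_REMOTE] = nleaves and rootbuflen[PETSCSF_REMOTE] = nroots above. */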

static PetscErrorCode PetscSFBcastAndOpBegin_Allgather(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void *rootdata,PetscMemType leafmtype,void *leafdata,MPI_Op op)
{
  PetscErrorCode        ierr;
  PetscSFLink           link;
  PetscMPIInt           sendcount;
  MPI_Comm              comm;
  void                  *rootbuf = NULL,*leafbuf = NULL; /* buffer seen by MPI */
  MPI_Request           *req;

  PetscFunctionBegin;
  ierr = PetscSFLinkCreate(sf,unit,rootmtype,rootdata,leafmtype,leafdata,op,PETSCSF_BCAST,&link);CHKERRQ(ierr);
  ierr = PetscSFLinkPackRootData(sf,link,PETSCSF_REMOTE,rootdata);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(sf->nroots,&sendcount);CHKERRQ(ierr);
  ierr = PetscSFLinkGetMPIBuffersAndRequests(sf,link,PETSCSF_ROOT2LEAF,&rootbuf,&leafbuf,&req,NULL);CHKERRQ(ierr);
  ierr = MPIU_Iallgather(rootbuf,sendcount,unit,leafbuf,sendcount,unit,comm,req);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
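
/* Hedged usage sketch (not part of this file): one way this kernel gets exercised.
   PetscSFSetGraphWithPattern() with PETSCSF_PATTERN_ALLGATHER is assumed to select
   this SF type; the bcast below then maps to the MPIU_Iallgather above.

     PetscSF     sf;
     PetscLayout map;              // layout of the roots across the communicator
     PetscScalar roots[2];         // my nroots = 2 entries
     PetscScalar *leaves;          // length 2*size: every rank receives all roots

     ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
     ierr = PetscSFSetGraphWithPattern(sf,map,PETSCSF_PATTERN_ALLGATHER);CHKERRQ(ierr);
     ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,roots,leaves);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,roots,leaves);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
*/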

static PetscErrorCode PetscSFReduceBegin_Allgather(PetscSF sf,MPI_Datatype unit,PetscMemType leafmtype,const void *leafdata,PetscMemType rootmtype,void *rootdata,MPI_Op op)
{
  PetscErrorCode        ierr;
  PetscSFLink           link;
  PetscInt              rstart;
  MPI_Comm              comm;
  PetscMPIInt           rank,count,recvcount;
  void                  *rootbuf = NULL,*leafbuf = NULL; /* buffer seen by MPI */
  PetscSF_Allgather     *dat = (PetscSF_Allgather*)sf->data;
  MPI_Request           *req;

  PetscFunctionBegin;
  ierr = PetscSFLinkCreate(sf,unit,rootmtype,rootdata,leafmtype,leafdata,op,PETSCSF_REDUCE,&link);CHKERRQ(ierr);
  if (op == MPIU_REPLACE) {
    /* REPLACE is only meaningful when all processes have the same leafdata to reduce. Therefore a copy from the local leafdata is fine */
    ierr = PetscLayoutGetRange(sf->map,&rstart,NULL);CHKERRQ(ierr);
    ierr = (*link->Memcpy)(link,rootmtype,rootdata,leafmtype,(const char*)leafdata+(size_t)rstart*link->unitbytes,(size_t)sf->nroots*link->unitbytes);CHKERRQ(ierr);
  } else {
    /* Reduce leafdata to rank 0, then scatter the result to rootdata */
    ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    ierr = PetscSFLinkPackLeafData(sf,link,PETSCSF_REMOTE,leafdata);CHKERRQ(ierr);
    ierr = PetscSFLinkGetMPIBuffersAndRequests(sf,link,PETSCSF_LEAF2ROOT,&rootbuf,&leafbuf,&req,NULL);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(dat->rootbuflen[PETSCSF_REMOTE],&recvcount);CHKERRQ(ierr);
    /* Allocate a separate leaf buffer on rank 0 to receive the reduction result */
    if (!rank && !link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi]) {
      ierr = PetscSFMalloc(sf,link->leafmtype_mpi,sf->leafbuflen[PETSCSF_REMOTE]*link->unitbytes,(void**)&link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi]);CHKERRQ(ierr);
    }
    /* If rank 0's send buffer is the receive buffer itself, tell MPI to reduce in place */
    if (!rank && link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi] == leafbuf) leafbuf = MPI_IN_PLACE;
    ierr = PetscMPIIntCast(sf->nleaves*link->bs,&count);CHKERRQ(ierr);
    ierr = MPI_Reduce(leafbuf,link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi],count,link->basicunit,op,0,comm);CHKERRQ(ierr); /* Must do the reduce with the MPI builtin datatype basicunit */
    ierr = MPIU_Iscatter(link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi],recvcount,unit,rootbuf,recvcount,unit,0/*rank 0*/,comm,req);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
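
/* Worked example (hedged sketch): 2 ranks, 2 roots per rank, op = MPI_SUM.
   Each rank holds 4 leaves covering all roots:

     rank 0 leaves: {a0,a1,a2,a3}     rank 1 leaves: {b0,b1,b2,b3}
     MPI_Reduce on rank 0:  {a0+b0,a1+b1,a2+b2,a3+b3}
     MPIU_Iscatter:         rank 0 roots get {a0+b0,a1+b1},
                            rank 1 roots get {a2+b2,a3+b3}
*/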

static PetscErrorCode PetscSFBcastToZero_Allgather(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void *rootdata,PetscMemType leafmtype,void *leafdata)
{
  PetscErrorCode        ierr;
  PetscSFLink           link;
  PetscMPIInt           rank;

  PetscFunctionBegin;
  /* Reuse the Gather kernel: gathering every rank's roots to rank 0 fills exactly rank 0's leaves, which is all BcastToZero needs */
  ierr = PetscSFBcastAndOpBegin_Gather(sf,unit,rootmtype,rootdata,leafmtype,leafdata,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFLinkGetInUse(sf,unit,rootdata,leafdata,PETSC_OWN_POINTER,&link);CHKERRQ(ierr);
  ierr = PetscSFLinkMPIWaitall(sf,link,PETSCSF_ROOT2LEAF);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
  /* If MPI is not GPU-aware, the gathered data landed in a host buffer; copy it to rank 0's device leafdata */
  if (!rank && leafmtype == PETSC_MEMTYPE_DEVICE && !sf->use_gpu_aware_mpi) {
    ierr = (*link->Memcpy)(link,PETSC_MEMTYPE_DEVICE,leafdata,PETSC_MEMTYPE_HOST,link->leafbuf[PETSCSF_REMOTE][PETSC_MEMTYPE_HOST],sf->leafbuflen[PETSCSF_REMOTE]*link->unitbytes);CHKERRQ(ierr);
  }
  ierr = PetscSFLinkReclaim(sf,&link);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
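
/* Semantics illustration (hedged sketch): with 2 ranks and 2 roots per rank,
   after PetscSFBcastToZero only rank 0's leaves are defined:

     rank 0 roots: {r0,r1}   rank 1 roots: {r2,r3}
     rank 0 leaves after the call: {r0,r1,r2,r3}
     rank 1 leaves: untouched
*/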

PETSC_INTERN PetscErrorCode PetscSFCreate_Allgather(PetscSF sf)
{
  PetscErrorCode    ierr;
  PetscSF_Allgather *dat = (PetscSF_Allgather*)sf->data;

  PetscFunctionBegin;
  sf->ops->BcastAndOpEnd   = PetscSFBcastAndOpEnd_Basic;
  sf->ops->ReduceEnd       = PetscSFReduceEnd_Basic;

  /* Inherit from Allgatherv */
  sf->ops->Reset           = PetscSFReset_Allgatherv;
  sf->ops->Destroy         = PetscSFDestroy_Allgatherv;
  sf->ops->FetchAndOpBegin = PetscSFFetchAndOpBegin_Allgatherv;
  sf->ops->FetchAndOpEnd   = PetscSFFetchAndOpEnd_Allgatherv;
  sf->ops->GetRootRanks    = PetscSFGetRootRanks_Allgatherv;
  sf->ops->CreateLocalSF   = PetscSFCreateLocalSF_Allgatherv;
  sf->ops->GetGraph        = PetscSFGetGraph_Allgatherv;
  sf->ops->GetLeafRanks    = PetscSFGetLeafRanks_Allgatherv;

  /* Allgather stuff */
  sf->ops->SetUp           = PetscSFSetUp_Allgather;
  sf->ops->BcastAndOpBegin = PetscSFBcastAndOpBegin_Allgather;
  sf->ops->ReduceBegin     = PetscSFReduceBegin_Allgather;
  sf->ops->BcastToZero     = PetscSFBcastToZero_Allgather;

  ierr = PetscNewLog(sf,&dat);CHKERRQ(ierr);
  sf->data = (void*)dat;
  PetscFunctionReturn(0);
}
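
/* Hedged creation sketch (not part of this file): the type can also be requested
   directly, assuming the usual PETSc type-setting API applies to PetscSF:

     PetscSF sf;
     ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
     ierr = PetscSFSetType(sf,PETSCSFALLGATHER);CHKERRQ(ierr);
*/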