static char help[]= "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
 and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
 Run with ./prog -op [replace | sum]\n\n";
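
/* Example invocation (assuming the executable is built as ex3, as in the PETSc test harness;
   adjust the name if built by hand):
     mpiexec -n 2 ./ex3 -op sum
   This exercises the GATHER, ALLGATHER and ALLTOALL patterns below in turn. */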

#include <petscvec.h>
#include <petscsf.h>
int main(int argc,char **argv)
{
  PetscInt       i,N=10,low,high,nleaves;
  PetscMPIInt    size,rank;
  Vec            x,y,y2,gy2;
  PetscScalar    *rootdata,*leafdata,*leafupdate;
  PetscLayout    layout;
  PetscSF        gathersf,allgathersf,alltoallsf;
  MPI_Op         op=MPI_SUM;
  char           opname[64];
  const char     *mpiopname;
  PetscBool      flag,isreplace,issum;

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

  PetscCall(PetscOptionsGetString(NULL,NULL,"-op",opname,sizeof(opname),&flag));
  /* Guard against reading the uninitialized opname when -op is absent */
  PetscCheck(flag,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Must provide -op [replace | sum]");
  PetscCall(PetscStrcmp(opname,"replace",&isreplace));
  PetscCall(PetscStrcmp(opname,"sum",&issum));

  if (isreplace)  {op = MPI_REPLACE; mpiopname = "MPI_REPLACE";}
  else if (issum) {op = MPIU_SUM;    mpiopname = "MPI_SUM";}
  else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Unsupported argument (%s) to -op, which must be 'replace' or 'sum'",opname);
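
  /* For each leaf, PetscSFFetchAndOp fetches the current value of its root into leafupdate and then
     updates the root with the leaf value via op, i.e., rootdata = rootdata op leafdata. When several
     leaves (possibly on different ranks) share a root, the updates are applied atomically but in an
     unspecified order. */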

  PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x,PETSC_DECIDE,N));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_GATHER        */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  PetscCall(VecGetOwnershipRange(x,&low,&high));
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the gather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n",mpiopname));
  PetscCall(VecGetLayout(x,&layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD,&gathersf));
  PetscCall(PetscSFSetGraphWithPattern(gathersf,layout,PETSCSF_PATTERN_GATHER));
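  /* With PETSCSF_PATTERN_GATHER, the roots are the N entries of layout and all of their leaves live on
     rank 0 (like MPI_Gatherv to root 0), so PetscSFGetGraph below reports nleaves = N on rank 0 and
     nleaves = 0 elsewhere. */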

  /* Create the leaf vector y (a sequential vector) and its duplicate y2, whose array serves as leafupdate */
  PetscCall(PetscSFGetGraph(gathersf,NULL,&nleaves,NULL,NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  PetscCall(VecDuplicate(y,&y2));

  PetscCall(VecGetArray(x,&rootdata));
  PetscCall(VecGetArray(y,&leafdata));
  PetscCall(VecGetArray(y2,&leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y by 2 so that leafdata = [2,2*N] */
  PetscCall(PetscSFBcastBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(VecRestoreArray(y,&leafdata)); /* restore the array so VecScale can safely modify y */
  PetscCall(VecScale(y,2));
  PetscCall(VecGetArray(y,&leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  PetscCall(PetscSFFetchAndOpEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is a gather graph, with op = sum we get leafupdate = old rootdata = [1,N], then rootdata += leafdata, i.e., rootdata = [3,3*N] */
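  /* gy2 wraps the leafupdate array without copying it (VecCreateMPIWithArray does not take ownership),
     so the distributed leafupdate can be viewed as one parallel vector; gy2 must be destroyed before
     y2's array is restored */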
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  PetscCall(PetscObjectSetName((PetscObject)x,"rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2,&leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x,&rootdata));
  /* PetscCall(VecDestroy(&x)); */ /* We will reuse x in ALLGATHER, so do not destroy it */

  PetscCall(PetscSFDestroy(&gathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLGATHER     */
  /*-------------------------------------*/

  /* Reset MPI vec x to [1, 2, .., N], since the GATHER test above modified the root values */
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the allgather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n",mpiopname));
  PetscCall(VecGetLayout(x,&layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD,&allgathersf));
  PetscCall(PetscSFSetGraphWithPattern(allgathersf,layout,PETSCSF_PATTERN_ALLGATHER));
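  /* With PETSCSF_PATTERN_ALLGATHER, every rank has N leaves mirroring the N roots of layout (like
     MPI_Allgatherv), so all ranks fetch-and-update the same roots concurrently and the result depends
     on the order in which their updates are applied. */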

  /* Create the leaf vector y (a sequential vector) and its duplicate y2, whose array serves as leafupdate */
  PetscCall(PetscSFGetGraph(allgathersf,NULL,&nleaves,NULL,NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  PetscCall(VecDuplicate(y,&y2));

  PetscCall(VecGetArray(x,&rootdata));
  PetscCall(VecGetArray(y,&leafdata));
  PetscCall(VecGetArray(y2,&leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y by 2 so that leafdata = [2,2*N] */
  PetscCall(PetscSFBcastBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecScale(y,2));
  PetscCall(VecGetArray(y,&leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  PetscCall(PetscSFFetchAndOpEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is an allgather graph, all ranks update the same roots.
     With op = sum, and supposing ranks get their updates applied in ascending order, we have
       rank 0: leafupdate = rootdata = [1,N],   rootdata += leafdata = [3,3*N]
       rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
       rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
       ...
   */
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  PetscCall(PetscObjectSetName((PetscObject)x,"rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2,&leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x,&rootdata));
  PetscCall(VecDestroy(&x)); /* x is recreated with different sizes for the ALLTOALL test, so destroy it here */

  PetscCall(PetscSFDestroy(&allgathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLTOALL      */
  /*-------------------------------------*/

  PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x,size,PETSC_DECIDE));

  /* set MPI vec x to [1, 2, .., size^2] */
  PetscCall(VecGetOwnershipRange(x,&low,&high));
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the alltoall SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n",mpiopname));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD,&alltoallsf));
  PetscCall(PetscSFSetGraphWithPattern(alltoallsf,NULL/*the layout is insignificant for ALLTOALL*/,PETSCSF_PATTERN_ALLTOALL));
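  /* With PETSCSF_PATTERN_ALLTOALL, each rank has size roots and size leaves, and leaf i on rank j is
     connected to root j on rank i (like MPI_Alltoall with one entry per rank), so each root has exactly
     one leaf and there is no ordering ambiguity in the FetchAndOp results below. */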

  /* Create the leaf vector y (a sequential vector) and its duplicate y2, whose array serves as leafupdate */
  PetscCall(PetscSFGetGraph(alltoallsf,NULL,&nleaves,NULL,NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  PetscCall(VecDuplicate(y,&y2));

  PetscCall(VecGetArray(x,&rootdata));
  PetscCall(VecGetArray(y,&leafdata));
  PetscCall(VecGetArray(y2,&leafupdate));

  /* Bcast x to y, to initialize y[i] = 1+rank+size*i, for i = 0..size-1 */
  PetscCall(PetscSFBcastBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  PetscCall(PetscSFFetchAndOpEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has exactly one leaf.
     So leafupdate[i] = old rootdata = 1+rank+size*i, i = 0..size-1; with op = sum, rootdata += leafdata
     doubles every entry, i.e., rootdata = [2, 4, .., 2*size^2]
   */
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  PetscCall(PetscObjectSetName((PetscObject)x,"rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2,&leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x,&rootdata));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscSFDestroy(&alltoallsf));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      nsize: 2
      args: -op replace

   test:
      suffix: 2
      nsize: 2
      args: -op sum

   # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
   test:
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      suffix: 3
      nsize: 3
      args: -op replace

   test:
      suffix: 4
      nsize: 3
      args: -op sum

TEST*/