static char help[]= "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
 and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
 Run with ./prog -op [replace | sum]\n\n";

#include <petscvec.h>
#include <petscsf.h>
int main(int argc,char **argv)
{
  PetscInt       i,N=10,low,high,nleaves;
  PetscMPIInt    size,rank;
  Vec            x,y,y2,gy2;
  PetscScalar    *rootdata,*leafdata,*leafupdate;
  PetscLayout    layout;
  PetscSF        gathersf,allgathersf,alltoallsf;
  MPI_Op         op=MPI_SUM;
  char           opname[64];
  const char     *mpiopname;
  PetscBool      flag,isreplace,issum;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

  PetscCall(PetscOptionsGetString(NULL,NULL,"-op",opname,sizeof(opname),&flag));
  PetscCall(PetscStrcmp(opname,"replace",&isreplace));
  PetscCall(PetscStrcmp(opname,"sum",&issum));

  if (isreplace)  {op = MPI_REPLACE; mpiopname = "MPI_REPLACE";}
  else if (issum) {op = MPIU_SUM;     mpiopname = "MPI_SUM";}
  else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Unsupported argument (%s) to -op, which must be 'replace' or 'sum'",opname);

  PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x,PETSC_DECIDE,N));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_GATHER        */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  PetscCall(VecGetOwnershipRange(x,&low,&high));
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the gather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n",mpiopname));
  PetscCall(VecGetLayout(x,&layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD,&gathersf));
  PetscCall(PetscSFSetGraphWithPattern(gathersf,layout,PETSCSF_PATTERN_GATHER));
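  /* With PETSCSF_PATTERN_GATHER, the roots follow the layout of x while only rank 0 has leaves
     (leaf i on rank 0 is connected to global root i), so a Bcast over this SF behaves like a
     gather of x onto rank 0 and a Reduce behaves like the corresponding scatter back. */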

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  PetscCall(PetscSFGetGraph(gathersf,NULL,&nleaves,NULL,NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  PetscCall(VecDuplicate(y,&y2));

  PetscCall(VecGetArray(x,&rootdata));
  PetscCall(VecGetArray(y,&leafdata));
  PetscCall(VecGetArray(y2,&leafupdate));

  /* Bcast x to y, to initialize y = [1,N], then scale y to make leafdata = y = [2,2*N] */
  PetscCall(PetscSFBcastBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecScale(y,2));
  PetscCall(VecGetArray(y,&leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  PetscCall(PetscSFFetchAndOpEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is a gather graph, leafupdate = rootdata = [1,N], then rootdata += leafdata, i.e., [3,3*N] */
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  PetscCall(PetscObjectSetName((PetscObject)x,"rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));
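
  /* A minimal consistency check (a sketch, only meaningful for -op sum): each owned root i should
     now hold 3*(i+1), since the fetched roots were [1,N] and the leaf values [2,2*N] were added. */
  if (issum) {
    for (i=low; i<high; i++) PetscCheck(PetscAbsScalar(rootdata[i-low] - 3.0*(i+1)) < PETSC_SMALL,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Unexpected rootdata at global index %" PetscInt_FMT,i);
  }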

  PetscCall(VecRestoreArray(y2,&leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x,&rootdata));
  /* PetscCall(VecDestroy(&x)); */ /* We will reuse x in ALLGATHER, so do not destroy it */

  PetscCall(PetscSFDestroy(&gathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLGATHER     */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the allgather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n",mpiopname));
  PetscCall(VecGetLayout(x,&layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD,&allgathersf));
  PetscCall(PetscSFSetGraphWithPattern(allgathersf,layout,PETSCSF_PATTERN_ALLGATHER));
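  /* With PETSCSF_PATTERN_ALLGATHER, every rank has N leaves and leaf i is connected to global
     root i, so a Bcast over this SF behaves like an allgather of x onto all ranks. */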

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  PetscCall(PetscSFGetGraph(allgathersf,NULL,&nleaves,NULL,NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  PetscCall(VecDuplicate(y,&y2));

  PetscCall(VecGetArray(x,&rootdata));
  PetscCall(VecGetArray(y,&leafdata));
  PetscCall(VecGetArray(y2,&leafupdate));

  /* Bcast x to y, to initialize y = [1,N], then scale y to make leafdata = y = [2,2*N] */
  PetscCall(PetscSFBcastBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecScale(y,2));
  PetscCall(VecGetArray(y,&leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  PetscCall(PetscSFFetchAndOpEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is an allgather graph, we have (suppose ranks get updates in ascending order)
     rank 0: leafupdate = rootdata = [1,N],   rootdata += leafdata = [3,3*N]
     rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
     rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
     ...
   */
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  PetscCall(PetscObjectSetName((PetscObject)x,"rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));
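
  /* A minimal consistency check (a sketch, only meaningful for -op sum): every rank contributes a
     leaf value 2*(i+1) to root i, so each owned root should end up as (1+2*size)*(i+1). */
  if (issum) {
    for (i=low; i<high; i++) PetscCheck(PetscAbsScalar(rootdata[i-low] - (1.0+2.0*size)*(i+1)) < PETSC_SMALL,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Unexpected rootdata at global index %" PetscInt_FMT,i);
  }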

  PetscCall(VecRestoreArray(y2,&leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x,&rootdata));
  PetscCall(VecDestroy(&x)); /* We won't reuse x in the ALLTOALL test, so destroy it */

  PetscCall(PetscSFDestroy(&allgathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLTOALL      */
  /*-------------------------------------*/

  PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x,size,PETSC_DECIDE));

  /* set MPI vec x to [1, 2, .., size^2] */
  PetscCall(VecGetOwnershipRange(x,&low,&high));
  for (i=low; i<high; i++) PetscCall(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the alltoall SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n",mpiopname));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD,&alltoallsf));
  PetscCall(PetscSFSetGraphWithPattern(alltoallsf,NULL/*insignificant*/,PETSCSF_PATTERN_ALLTOALL));
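  /* With PETSCSF_PATTERN_ALLTOALL, each rank has 'size' roots and 'size' leaves, and leaf i on
     rank r is connected to root index r on rank i, so the SF behaves like an MPI_Alltoall that
     exchanges one entry between every pair of ranks. */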

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  PetscCall(PetscSFGetGraph(alltoallsf,NULL,&nleaves,NULL,NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  PetscCall(VecDuplicate(y,&y2));

  PetscCall(VecGetArray(x,&rootdata));
  PetscCall(VecGetArray(y,&leafdata));
  PetscCall(VecGetArray(y2,&leafupdate));

  /* Bcast x to y, to initialize y = 1+rank+size*i, with i=0..size-1 */
  PetscCall(PetscSFBcastBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  PetscCall(PetscSFFetchAndOpEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has only one leaf.
     So, leafupdate = rootdata = 1+rank+size*i, i=0..size-1; and rootdata += leafdata, which doubles it, so globally rootdata = 2, 4, .., 2*size^2
   */
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  PetscCall(PetscObjectSetName((PetscObject)x,"rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  PetscCall(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));
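
  /* A minimal consistency check (a sketch, only meaningful for -op sum): each root has exactly
     one leaf carrying the same value, so every owned root should simply be doubled to 2*(i+1). */
  if (issum) {
    for (i=low; i<high; i++) PetscCheck(PetscAbsScalar(rootdata[i-low] - 2.0*(i+1)) < PETSC_SMALL,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Unexpected rootdata at global index %" PetscInt_FMT,i);
  }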

  PetscCall(VecRestoreArray(y2,&leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y,&leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x,&rootdata));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscSFDestroy(&alltoallsf));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
      #MPI_Sendrecv_replace is broken with 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      nsize: 2
      args: -op replace

   test:
      suffix: 2
      nsize: 2
      args: -op sum

   # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
   test:
      #MPI_Sendrecv_replace is broken with 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      suffix: 3
      nsize: 3
      args: -op replace

   test:
      suffix: 4
      nsize: 3
      args: -op sum

TEST*/