xref: /petsc/src/vec/is/sf/tests/ex3.c (revision b122ec5aa1bd4469eb4e0673542fb7de3f411254)
/* Usage/help text printed with -help; summarizes what this test exercises. */
static char help[]= "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
 and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
 Run with ./prog -op [replace | sum]\n\n";
4 
5 #include <petscvec.h>
6 #include <petscsf.h>
7 int main(int argc,char **argv)
8 {
9   PetscInt       i,N=10,low,high,nleaves;
10   PetscMPIInt    size,rank;
11   Vec            x,y,y2,gy2;
12   PetscScalar    *rootdata,*leafdata,*leafupdate;
13   PetscLayout    layout;
14   PetscSF        gathersf,allgathersf,alltoallsf;
15   MPI_Op         op=MPI_SUM;
16   char           opname[64];
17   const char     *mpiopname;
18   PetscBool      flag,isreplace,issum;
19 
20   CHKERRQ(PetscInitialize(&argc,&argv,(char*)0,help));
21   CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
22   CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
23 
24   CHKERRQ(PetscOptionsGetString(NULL,NULL,"-op",opname,sizeof(opname),&flag));
25   CHKERRQ(PetscStrcmp(opname,"replace",&isreplace));
26   CHKERRQ(PetscStrcmp(opname,"sum",&issum));
27 
28   if (isreplace)  {op = MPI_REPLACE; mpiopname = "MPI_REPLACE";}
29   else if (issum) {op = MPIU_SUM;     mpiopname = "MPI_SUM";}
30   else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Unsupported argument (%s) to -op, which must be 'replace' or 'sum'",opname);
31 
32   CHKERRQ(VecCreate(PETSC_COMM_WORLD,&x));
33   CHKERRQ(VecSetFromOptions(x));
34   CHKERRQ(VecSetSizes(x,PETSC_DECIDE,N));
35 
36   /*-------------------------------------*/
37   /*       PETSCSF_PATTERN_GATHER        */
38   /*-------------------------------------*/
39 
40   /* set MPI vec x to [1, 2, .., N] */
41   CHKERRQ(VecGetOwnershipRange(x,&low,&high));
42   for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
43   CHKERRQ(VecAssemblyBegin(x));
44   CHKERRQ(VecAssemblyEnd(x));
45 
46   /* Create the gather SF */
47   CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n",mpiopname));
48   CHKERRQ(VecGetLayout(x,&layout));
49   CHKERRQ(PetscSFCreate(PETSC_COMM_WORLD,&gathersf));
50   CHKERRQ(PetscSFSetGraphWithPattern(gathersf,layout,PETSCSF_PATTERN_GATHER));
51 
52   /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
53   CHKERRQ(PetscSFGetGraph(gathersf,NULL,&nleaves,NULL,NULL));
54   CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
55   CHKERRQ(VecDuplicate(y,&y2));
56 
57   CHKERRQ(VecGetArray(x,&rootdata));
58   CHKERRQ(VecGetArray(y,&leafdata));
59   CHKERRQ(VecGetArray(y2,&leafupdate));
60 
61   /* Bcast x to y,to initialize y = [1,N], then scale y to make leafupdate = y = [2,2*N] */
62   CHKERRQ(PetscSFBcastBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
63   CHKERRQ(PetscSFBcastEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
64   CHKERRQ(VecRestoreArray(y,&leafdata));
65   CHKERRQ(VecScale(y,2));
66   CHKERRQ(VecGetArray(y,&leafdata));
67 
68   /* FetchAndOp x to y */
69   CHKERRQ(PetscSFFetchAndOpBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
70   CHKERRQ(PetscSFFetchAndOpEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
71 
72   /* View roots (x) and leafupdate (y2). Since this is a gather graph, leafudpate = rootdata = [1,N], then rootdata += leafdata, i.e., [3,3*N] */
73   CHKERRQ(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
74   CHKERRQ(PetscObjectSetName((PetscObject)x,"rootdata"));
75   CHKERRQ(PetscObjectSetName((PetscObject)gy2,"leafupdate"));
76 
77   CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
78   CHKERRQ(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
79   CHKERRQ(VecDestroy(&gy2));
80 
81   CHKERRQ(VecRestoreArray(y2,&leafupdate));
82   CHKERRQ(VecDestroy(&y2));
83 
84   CHKERRQ(VecRestoreArray(y,&leafdata));
85   CHKERRQ(VecDestroy(&y));
86 
87   CHKERRQ(VecRestoreArray(x,&rootdata));
88   /* CHKERRQ(VecDestroy(&x)); */ /* We will reuse x in ALLGATHER, so do not destroy it */
89 
90   CHKERRQ(PetscSFDestroy(&gathersf));
91 
92   /*-------------------------------------*/
93   /*       PETSCSF_PATTERN_ALLGATHER     */
94   /*-------------------------------------*/
95 
96   /* set MPI vec x to [1, 2, .., N] */
97   for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
98   CHKERRQ(VecAssemblyBegin(x));
99   CHKERRQ(VecAssemblyEnd(x));
100 
101   /* Create the allgather SF */
102   CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n",mpiopname));
103   CHKERRQ(VecGetLayout(x,&layout));
104   CHKERRQ(PetscSFCreate(PETSC_COMM_WORLD,&allgathersf));
105   CHKERRQ(PetscSFSetGraphWithPattern(allgathersf,layout,PETSCSF_PATTERN_ALLGATHER));
106 
107   /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
108   CHKERRQ(PetscSFGetGraph(allgathersf,NULL,&nleaves,NULL,NULL));
109   CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
110   CHKERRQ(VecDuplicate(y,&y2));
111 
112   CHKERRQ(VecGetArray(x,&rootdata));
113   CHKERRQ(VecGetArray(y,&leafdata));
114   CHKERRQ(VecGetArray(y2,&leafupdate));
115 
116   /* Bcast x to y, to initialize y = [1,N], then scale y to make leafupdate = y = [2,2*N] */
117   CHKERRQ(PetscSFBcastBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
118   CHKERRQ(PetscSFBcastEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
119   CHKERRQ(VecRestoreArray(y,&leafdata));
120   CHKERRQ(VecScale(y,2));
121   CHKERRQ(VecGetArray(y,&leafdata));
122 
123   /* FetchAndOp x to y */
124   CHKERRQ(PetscSFFetchAndOpBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
125   CHKERRQ(PetscSFFetchAndOpEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
126 
127   /* View roots (x) and leafupdate (y2). Since this is an allgather graph, we have (suppose ranks get updates in ascending order)
128      rank 0: leafupdate = rootdata = [1,N],   rootdata += leafdata = [3,3*N]
129      rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
130      rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
131      ...
132    */
133   CHKERRQ(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
134   CHKERRQ(PetscObjectSetName((PetscObject)x,"rootdata"));
135   CHKERRQ(PetscObjectSetName((PetscObject)gy2,"leafupdate"));
136 
137   CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
138   CHKERRQ(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
139   CHKERRQ(VecDestroy(&gy2));
140 
141   CHKERRQ(VecRestoreArray(y2,&leafupdate));
142   CHKERRQ(VecDestroy(&y2));
143 
144   CHKERRQ(VecRestoreArray(y,&leafdata));
145   CHKERRQ(VecDestroy(&y));
146 
147   CHKERRQ(VecRestoreArray(x,&rootdata));
148   CHKERRQ(VecDestroy(&x)); /* We won't reuse x in ALLGATHER, so destroy it */
149 
150   CHKERRQ(PetscSFDestroy(&allgathersf));
151 
152   /*-------------------------------------*/
153   /*       PETSCSF_PATTERN_ALLTOALL     */
154   /*-------------------------------------*/
155 
156   CHKERRQ(VecCreate(PETSC_COMM_WORLD,&x));
157   CHKERRQ(VecSetFromOptions(x));
158   CHKERRQ(VecSetSizes(x,size,PETSC_DECIDE));
159 
160   /* set MPI vec x to [1, 2, .., size^2] */
161   CHKERRQ(VecGetOwnershipRange(x,&low,&high));
162   for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
163   CHKERRQ(VecAssemblyBegin(x));
164   CHKERRQ(VecAssemblyEnd(x));
165 
166 /* Create the alltoall SF */
167   CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n",mpiopname));
168   CHKERRQ(PetscSFCreate(PETSC_COMM_WORLD,&alltoallsf));
169   CHKERRQ(PetscSFSetGraphWithPattern(alltoallsf,NULL/*insignificant*/,PETSCSF_PATTERN_ALLTOALL));
170 
171   /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
172   CHKERRQ(PetscSFGetGraph(alltoallsf,NULL,&nleaves,NULL,NULL));
173   CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
174   CHKERRQ(VecDuplicate(y,&y2));
175 
176   CHKERRQ(VecGetArray(x,&rootdata));
177   CHKERRQ(VecGetArray(y,&leafdata));
178   CHKERRQ(VecGetArray(y2,&leafupdate));
179 
180   /* Bcast x to y, to initialize y = 1+rank+size*i, with i=0..size-1 */
181   CHKERRQ(PetscSFBcastBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
182   CHKERRQ(PetscSFBcastEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
183 
184   /* FetchAndOp x to y */
185   CHKERRQ(PetscSFFetchAndOpBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
186   CHKERRQ(PetscSFFetchAndOpEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
187 
188   /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has only one leaf.
189      So, leafupdate = rootdata = 1+rank+size*i, i=0..size-1; and rootdata += leafdata, i.e., rootdata = [2,2*N]
190    */
191   CHKERRQ(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
192   CHKERRQ(PetscObjectSetName((PetscObject)x,"rootdata"));
193   CHKERRQ(PetscObjectSetName((PetscObject)gy2,"leafupdate"));
194 
195   CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
196   CHKERRQ(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
197   CHKERRQ(VecDestroy(&gy2));
198 
199   CHKERRQ(VecRestoreArray(y2,&leafupdate));
200   CHKERRQ(VecDestroy(&y2));
201 
202   CHKERRQ(VecRestoreArray(y,&leafdata));
203   CHKERRQ(VecDestroy(&y));
204 
205   CHKERRQ(VecRestoreArray(x,&rootdata));
206   CHKERRQ(VecDestroy(&x));
207 
208   CHKERRQ(PetscSFDestroy(&alltoallsf));
209 
210   CHKERRQ(PetscFinalize());
211   return 0;
212 }
213 
214 /*TEST
215 
216    test:
217       # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
      # MPI_Sendrecv_replace is broken in Intel MPI 2021.4 (I_MPI_NUMVERSION 20210400300), so skip when built with Intel MPI
219       requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
220       nsize: 2
221       args: -op replace
222 
223    test:
224       suffix: 2
225       nsize: 2
226       args: -op sum
227 
228    # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
229    test:
      # MPI_Sendrecv_replace is broken in Intel MPI 2021.4 (I_MPI_NUMVERSION 20210400300), so skip when built with Intel MPI
231       requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
232       suffix: 3
233       nsize: 3
234       args: -op replace
235 
236    test:
237       suffix: 4
238       nsize: 3
239       args: -op sum
240 
241 TEST*/
242