static char help[] = "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
 and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
 Run with ./prog -op [replace | sum]\n\n";
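
/* A rough sketch (comment only, not compiled) of the per-entry semantics PetscSFFetchAndOp is
   documented to provide, which all three patterned graphs below rely on; the loop and names are
   illustrative, not the actual implementation:

     for (each leaf l connected to a root r, possibly on another rank) {
       leafupdate[l] = rootdata[r];                  // fetch the old root value
       rootdata[r]   = op(rootdata[r],leafdata[l]);  // then update the root with op
     }

   When several leaves share a root, the updates are applied atomically but in an unspecified
   order, which is why the expected ALLGATHER output described below depends on the order in
   which ranks happen to reach the roots. */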

#include <petscvec.h>
#include <petscsf.h>
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       i,N=10,low,high,nleaves;
  PetscMPIInt    size,rank;
  Vec            x,y,y2,gy2;
  PetscScalar    *rootdata,*leafdata,*leafupdate;
  PetscLayout    layout;
  PetscSF        gathersf,allgathersf,alltoallsf;
  MPI_Op         op=MPI_SUM;
  char           opname[64];
  const char     *mpiopname;
  PetscBool      flag,isreplace,issum;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

  CHKERRQ(PetscOptionsGetString(NULL,NULL,"-op",opname,sizeof(opname),&flag));
  CHKERRQ(PetscStrcmp(opname,"replace",&isreplace));
  CHKERRQ(PetscStrcmp(opname,"sum",&issum));

  if (isreplace)  {op = MPI_REPLACE; mpiopname = "MPI_REPLACE";}
  else if (issum) {op = MPIU_SUM;    mpiopname = "MPI_SUM";}
  else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Unsupported argument (%s) to -op, which must be 'replace' or 'sum'",opname);
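
  /* Informally (names illustrative; see also the expected results in the comments further below):
       op = MPIU_SUM:    leafupdate[l] = rootdata[r]; rootdata[r] += leafdata[l];
       op = MPI_REPLACE: leafupdate[l] = rootdata[r]; rootdata[r]  = leafdata[l];   (a swap)
     so "sum" accumulates leaf contributions at the root, while "replace" exchanges root and leaf. */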

  CHKERRQ(VecCreate(PETSC_COMM_WORLD,&x));
  CHKERRQ(VecSetFromOptions(x));
  CHKERRQ(VecSetSizes(x,PETSC_DECIDE,N));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_GATHER        */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  CHKERRQ(VecGetOwnershipRange(x,&low,&high));
  for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  CHKERRQ(VecAssemblyBegin(x));
  CHKERRQ(VecAssemblyEnd(x));

  /* Create the gather SF */
  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n",mpiopname));
  CHKERRQ(VecGetLayout(x,&layout));
  CHKERRQ(PetscSFCreate(PETSC_COMM_WORLD,&gathersf));
  CHKERRQ(PetscSFSetGraphWithPattern(gathersf,layout,PETSCSF_PATTERN_GATHER));
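
  /* Informal paraphrase of the pattern (not taken from the PetscSF sources): with
     PETSCSF_PATTERN_GATHER the roots are the entries of x distributed by "layout", and only
     rank 0 has leaves, one per global root (think MPI_Gatherv to rank 0):

       leaf i on rank 0  <-->  root with global index i,   i = 0..N-1

     so the nleaves queried below is N on rank 0 and 0 on all other ranks. */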

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  CHKERRQ(PetscSFGetGraph(gathersf,NULL,&nleaves,NULL,NULL));
  CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  CHKERRQ(VecDuplicate(y,&y2));

  CHKERRQ(VecGetArray(x,&rootdata));
  CHKERRQ(VecGetArray(y,&leafdata));
  CHKERRQ(VecGetArray(y2,&leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,2*N] */
  CHKERRQ(PetscSFBcastBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  CHKERRQ(PetscSFBcastEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  CHKERRQ(VecRestoreArray(y,&leafdata));
  CHKERRQ(VecScale(y,2));
  CHKERRQ(VecGetArray(y,&leafdata));

  /* FetchAndOp x to y */
  CHKERRQ(PetscSFFetchAndOpBegin(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  CHKERRQ(PetscSFFetchAndOpEnd(gathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is a gather graph, leafupdate = rootdata = [1,N]; then the roots are updated with the leaves (e.g., for op = sum, rootdata += leafdata = [3,3*N]) */
  CHKERRQ(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  CHKERRQ(PetscObjectSetName((PetscObject)x,"rootdata"));
  CHKERRQ(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(VecDestroy(&gy2));

  CHKERRQ(VecRestoreArray(y2,&leafupdate));
  CHKERRQ(VecDestroy(&y2));

  CHKERRQ(VecRestoreArray(y,&leafdata));
  CHKERRQ(VecDestroy(&y));

  CHKERRQ(VecRestoreArray(x,&rootdata));
  /* CHKERRQ(VecDestroy(&x)); */ /* We will reuse x in ALLGATHER, so do not destroy it */

  CHKERRQ(PetscSFDestroy(&gathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLGATHER     */
  /*-------------------------------------*/

  /* Reset MPI vec x to [1, 2, .., N], since the FetchAndOp above modified the root values */
  for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  CHKERRQ(VecAssemblyBegin(x));
  CHKERRQ(VecAssemblyEnd(x));

  /* Create the allgather SF */
  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n",mpiopname));
  CHKERRQ(VecGetLayout(x,&layout));
  CHKERRQ(PetscSFCreate(PETSC_COMM_WORLD,&allgathersf));
  CHKERRQ(PetscSFSetGraphWithPattern(allgathersf,layout,PETSCSF_PATTERN_ALLGATHER));
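
  /* As above, but now every rank has one leaf per global root (think MPI_Allgatherv):

       leaf i on rank r  <-->  root with global index i,   i = 0..N-1, for every rank r

     so each root has "size" leaves, and the nleaves queried below is N on every rank. This is why
     the FetchAndOp result here depends on the (unspecified) order in which ranks update the roots. */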

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  CHKERRQ(PetscSFGetGraph(allgathersf,NULL,&nleaves,NULL,NULL));
  CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  CHKERRQ(VecDuplicate(y,&y2));

  CHKERRQ(VecGetArray(x,&rootdata));
  CHKERRQ(VecGetArray(y,&leafdata));
  CHKERRQ(VecGetArray(y2,&leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,2*N] */
  CHKERRQ(PetscSFBcastBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  CHKERRQ(PetscSFBcastEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  CHKERRQ(VecRestoreArray(y,&leafdata));
  CHKERRQ(VecScale(y,2));
  CHKERRQ(VecGetArray(y,&leafdata));

  /* FetchAndOp x to y */
  CHKERRQ(PetscSFFetchAndOpBegin(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  CHKERRQ(PetscSFFetchAndOpEnd(allgathersf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is an allgather graph, we have (shown for op = sum, supposing ranks get to update the roots in ascending order)
     rank 0: leafupdate = rootdata = [1,N],   rootdata += leafdata = [3,3*N]
     rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
     rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
     ...
   */
  CHKERRQ(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  CHKERRQ(PetscObjectSetName((PetscObject)x,"rootdata"));
  CHKERRQ(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(VecDestroy(&gy2));

  CHKERRQ(VecRestoreArray(y2,&leafupdate));
  CHKERRQ(VecDestroy(&y2));

  CHKERRQ(VecRestoreArray(y,&leafdata));
  CHKERRQ(VecDestroy(&y));

  CHKERRQ(VecRestoreArray(x,&rootdata));
  CHKERRQ(VecDestroy(&x)); /* We won't reuse x in ALLTOALL, so destroy it */

  CHKERRQ(PetscSFDestroy(&allgathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLTOALL      */
  /*-------------------------------------*/

  CHKERRQ(VecCreate(PETSC_COMM_WORLD,&x));
  CHKERRQ(VecSetFromOptions(x));
  CHKERRQ(VecSetSizes(x,size,PETSC_DECIDE));

  /* set MPI vec x to [1, 2, .., size^2] */
  CHKERRQ(VecGetOwnershipRange(x,&low,&high));
  for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i+1.0,INSERT_VALUES));
  CHKERRQ(VecAssemblyBegin(x));
  CHKERRQ(VecAssemblyEnd(x));

  /* Create the alltoall SF */
  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n",mpiopname));
  CHKERRQ(PetscSFCreate(PETSC_COMM_WORLD,&alltoallsf));
  CHKERRQ(PetscSFSetGraphWithPattern(alltoallsf,NULL/*insignificant*/,PETSCSF_PATTERN_ALLTOALL));
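
  /* Informal paraphrase of the pattern (consistent with the Bcast result noted below): with
     PETSCSF_PATTERN_ALLTOALL each rank has "size" roots and "size" leaves, and

       leaf i on rank r  <-->  root r on rank i

     like an MPI_Alltoall with one entry per process, so every root has exactly one leaf. The
     layout argument is ignored for this pattern, hence the NULL above. */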

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  CHKERRQ(PetscSFGetGraph(alltoallsf,NULL,&nleaves,NULL,NULL));
  CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,nleaves,&y));
  CHKERRQ(VecDuplicate(y,&y2));

  CHKERRQ(VecGetArray(x,&rootdata));
  CHKERRQ(VecGetArray(y,&leafdata));
  CHKERRQ(VecGetArray(y2,&leafupdate));

  /* Bcast x to y, to initialize y = 1+rank+size*i, with i=0..size-1 */
  CHKERRQ(PetscSFBcastBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));
  CHKERRQ(PetscSFBcastEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,MPI_REPLACE));

  /* FetchAndOp x to y */
  CHKERRQ(PetscSFFetchAndOpBegin(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));
  CHKERRQ(PetscSFFetchAndOpEnd(alltoallsf,MPIU_SCALAR,rootdata,leafdata,leafupdate,op));

  /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has only one leaf.
     So, leafupdate = rootdata = 1+rank+size*i, i=0..size-1; and the roots are updated with the leaves
     (for op = sum, rootdata += leafdata, i.e., rootdata = [2,2*size^2] globally)
   */
  CHKERRQ(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nleaves,PETSC_DECIDE,leafupdate,&gy2));
  CHKERRQ(PetscObjectSetName((PetscObject)x,"rootdata"));
  CHKERRQ(PetscObjectSetName((PetscObject)gy2,"leafupdate"));

  CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(VecView(gy2,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(VecDestroy(&gy2));

  CHKERRQ(VecRestoreArray(y2,&leafupdate));
  CHKERRQ(VecDestroy(&y2));

  CHKERRQ(VecRestoreArray(y,&leafdata));
  CHKERRQ(VecDestroy(&y));

  CHKERRQ(VecRestoreArray(x,&rootdata));
  CHKERRQ(VecDestroy(&x));

  CHKERRQ(PetscSFDestroy(&alltoallsf));

  ierr = PetscFinalize();
  return ierr;
}

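/* The block below drives the PETSc test harness. An equivalent manual run (assuming an MPI launcher
   named mpiexec and an executable built as ./ex3) would be, for example:

     mpiexec -n 2 ./ex3 -op replace
     mpiexec -n 3 ./ex3 -op sum
*/
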
/*TEST

   test:
      # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      nsize: 2
      args: -op replace

   test:
      suffix: 2
      nsize: 2
      args: -op sum

   # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
   test:
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      suffix: 3
      nsize: 3
      args: -op replace

   test:
      suffix: 4
      nsize: 3
      args: -op sum

TEST*/