static char help[] = "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
 and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
 Run with ./prog -op [replace | sum]\n\n";
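
/*
  For each of the three patterned star forests below (GATHER, ALLGATHER, ALLTOALL) the test
    1) fills the root vector x with consecutive values,
    2) broadcasts roots to leaves with PetscSFBcast (op = MPI_REPLACE),
    3) calls PetscSFFetchAndOp, so every leaf fetches into leafupdate the value its root held
       just before that leaf's contribution was applied, while the root is updated as
       root = op(root, leafdata),
    4) views rootdata and leafupdate to check the result.
*/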

#include <petscvec.h>
#include <petscsf.h>
int main(int argc, char **argv)
{
  PetscInt     i, N = 10, low, high, nleaves;
  PetscMPIInt  size, rank;
  Vec          x, y, y2, gy2;
  PetscScalar *rootdata, *leafdata, *leafupdate;
  PetscLayout  layout;
  PetscSF      gathersf, allgathersf, alltoallsf;
  MPI_Op       op = MPI_SUM;
  char         opname[64] = ""; /* left empty so a missing -op option is caught by the error check below */
  const char  *mpiopname;
  PetscBool    flag, isreplace, issum;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCall(PetscOptionsGetString(NULL, NULL, "-op", opname, sizeof(opname), &flag));
  PetscCall(PetscStrcmp(opname, "replace", &isreplace));
  PetscCall(PetscStrcmp(opname, "sum", &issum));

  if (isreplace) {
    op        = MPI_REPLACE;
    mpiopname = "MPI_REPLACE";
  } else if (issum) {
    op        = MPIU_SUM;
    mpiopname = "MPI_SUM";
  } else SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Unsupported argument (%s) to -op, which must be 'replace' or 'sum'", opname);
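
  /* In PetscSFFetchAndOp, leafupdate always receives the value a root held immediately before
     this leaf's contribution was applied, while the root itself is updated as
     root = op(root, leafdata). With MPI_REPLACE that amounts to a swap of root and leaf values;
     with MPIU_SUM it is a fetch-and-add. */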

  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, N));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_GATHER        */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the gather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n", mpiopname));
  PetscCall(VecGetLayout(x, &layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &gathersf));
  PetscCall(PetscSFSetGraphWithPattern(gathersf, layout, PETSCSF_PATTERN_GATHER));
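  /* With PETSCSF_PATTERN_GATHER, every root in the layout is connected to a leaf on rank 0,
     so nleaves queried below should be N on rank 0 and 0 on all other ranks; PetscSFBcast on
     this SF behaves like a gather of the roots to rank 0 */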

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  PetscCall(PetscSFGetGraph(gathersf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,4,...,2*N] */
  PetscCall(PetscSFBcastBegin(gathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(gathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecScale(y, 2));
  PetscCall(VecGetArray(y, &leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(gathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(gathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is a gather graph, leafupdate = old rootdata = [1,N]; with op = sum, rootdata += leafdata gives [3,6,...,3*N] (with replace, rootdata = leafdata = [2,4,...,2*N]) */
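  /* Wrap each rank's leafupdate buffer as the local part of a parallel vector so it can be
     printed with VecView; VecCreateMPIWithArray does not copy the data, so gy2 is destroyed
     before leafupdate is restored to y2 */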
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  /* PetscCall(VecDestroy(&x)); */ /* We will reuse x in ALLGATHER, so do not destroy it */

  PetscCall(PetscSFDestroy(&gathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLGATHER     */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the allgather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n", mpiopname));
  PetscCall(VecGetLayout(x, &layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &allgathersf));
  PetscCall(PetscSFSetGraphWithPattern(allgathersf, layout, PETSCSF_PATTERN_ALLGATHER));
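  /* With PETSCSF_PATTERN_ALLGATHER, every rank has N leaves connected to all N roots, so each
     root is targeted by one leaf per rank. FetchAndOp applies these concurrent updates in an
     unspecified order, which is why the expected result below is listed under an assumed
     ordering of the ranks */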

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  PetscCall(PetscSFGetGraph(allgathersf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,4,...,2*N] */
  PetscCall(PetscSFBcastBegin(allgathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(allgathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecScale(y, 2));
  PetscCall(VecGetArray(y, &leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(allgathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(allgathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is an allgather graph, we have (suppose ranks get updates in ascending order)
     rank 0: leafupdate = rootdata = [1,N],   rootdata += leafdata = [3,3*N]
     rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
     rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
     ...
   */
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  PetscCall(VecDestroy(&x)); /* x is recreated for the ALLTOALL test below, so destroy it here */

  PetscCall(PetscSFDestroy(&allgathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLTOALL      */
  /*-------------------------------------*/

  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x, size, PETSC_DECIDE));

  /* set MPI vec x to [1, 2, .., size^2] */
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the alltoall SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n", mpiopname));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &alltoallsf));
  PetscCall(PetscSFSetGraphWithPattern(alltoallsf, NULL /*insignificant*/, PETSCSF_PATTERN_ALLTOALL));
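  /* With PETSCSF_PATTERN_ALLTOALL, each rank has size roots and size leaves, with leaf i on
     rank r connected to root r on rank i (the layout argument is not used for this pattern,
     hence NULL); PetscSFBcast on this SF behaves like an MPI_Alltoall of one scalar per rank pair */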

  /* Create the leaf vector y (seq vector) and its duplicate y2 working as leafupdate */
  PetscCall(PetscSFGetGraph(alltoallsf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y to initialize y[i] = 1 + rank + size*i, i = 0..size-1 */
  PetscCall(PetscSFBcastBegin(alltoallsf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(alltoallsf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(alltoallsf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(alltoallsf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has exactly one leaf.
     So leafupdate = old rootdata, i.e., leafupdate[i] = 1 + rank + size*i, i = 0..size-1; with op = sum,
     rootdata += leafdata doubles each root (y was not scaled here, so leafdata equals the old rootdata)
   */
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscSFDestroy(&alltoallsf));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
      # MPI_Sendrecv_replace is broken with 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      nsize: 2
      args: -op replace

   test:
      suffix: 2
      nsize: 2
      args: -op sum

   # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
   test:
      # MPI_Sendrecv_replace is broken with 20210400300
      requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION)
      suffix: 3
      nsize: 3
      args: -op replace

   test:
      suffix: 4
      nsize: 3
      args: -op sum

TEST*/