static char help[] = "Test PetscSFFetchAndOp on patterned SF graphs. PetscSFFetchAndOp internally uses PetscSFBcastAndOp \n\
 and PetscSFReduce. So it is a good test to see if they all work for patterned graphs.\n\
 Run with ./prog -op [replace | sum]\n\n";

#include <petscvec.h>
#include <petscsf.h>
int main(int argc, char **argv)
{
  PetscInt     i, N = 10, low, high, nleaves;
  PetscMPIInt  size, rank;
  Vec          x, y, y2, gy2;
  PetscScalar *rootdata, *leafdata, *leafupdate;
  PetscLayout  layout;
  PetscSF      gathersf, allgathersf, alltoallsf;
  MPI_Op       op = MPI_SUM;
  char         opname[64] = ""; /* initialized so a missing -op is caught by the error below instead of reading garbage */
  const char  *mpiopname;
  PetscBool    flag, isreplace, issum;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCall(PetscOptionsGetString(NULL, NULL, "-op", opname, sizeof(opname), &flag));
  PetscCall(PetscStrcmp(opname, "replace", &isreplace));
  PetscCall(PetscStrcmp(opname, "sum", &issum));

  if (isreplace) {
    op        = MPI_REPLACE;
    mpiopname = "MPI_REPLACE";
  } else if (issum) {
    op        = MPIU_SUM;
    mpiopname = "MPI_SUM";
  } else SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Unsupported argument (%s) to -op, which must be 'replace' or 'sum'", opname);
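
  /* A note on semantics: for each (root, leaf) edge, PetscSFFetchAndOp does
       leafupdate[leaf] = rootdata[root];                       (fetch the old root value)
       rootdata[root]   = op(rootdata[root], leafdata[leaf]);   (then apply op)
     With MPI_REPLACE the root simply takes the leaf value; with MPI_SUM the leaf
     value is added to it. When several leaves target one root, each leaf fetches
     the root value as it stood just before its own update was applied. */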

  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, N));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_GATHER        */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the gather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_GATHER graph with op = %s\n", mpiopname));
  PetscCall(VecGetLayout(x, &layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &gathersf));
  PetscCall(PetscSFSetGraphWithPattern(gathersf, layout, PETSCSF_PATTERN_GATHER));
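
  /* For PETSCSF_PATTERN_GATHER the roots are the N layout entries spread over all
     ranks, while all N leaves live on rank 0 (nleaves = N on rank 0, 0 elsewhere),
     so y below is a full copy of x on rank 0 and an empty vector on the others. */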

  /* Create the leaf vector y (seq vector) and its duplicate y2, which serves as leafupdate */
  PetscCall(PetscSFGetGraph(gathersf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,2*N] */
  PetscCall(PetscSFBcastBegin(gathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(gathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecScale(y, 2));
  PetscCall(VecGetArray(y, &leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(gathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(gathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is a gather graph, with op = MPI_SUM we get leafupdate = old rootdata = [1,N], then rootdata += leafdata, i.e., rootdata = [3,3*N] */
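  /* With op = MPI_REPLACE, leafupdate still fetches the old rootdata = [1,N], but
     rootdata is simply overwritten by leafdata, i.e., becomes [2,2*N]. Concretely,
     for N = 10 and -op sum: leafupdate = [1,2,...,10], rootdata = [3,6,...,30]. */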
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  /* PetscCall(VecDestroy(&x)); */ /* We will reuse x in ALLGATHER, so do not destroy it */

  PetscCall(PetscSFDestroy(&gathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLGATHER     */
  /*-------------------------------------*/

  /* set MPI vec x to [1, 2, .., N] */
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the allgather SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLGATHER graph with op = %s\n", mpiopname));
  PetscCall(VecGetLayout(x, &layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &allgathersf));
  PetscCall(PetscSFSetGraphWithPattern(allgathersf, layout, PETSCSF_PATTERN_ALLGATHER));
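
  /* For PETSCSF_PATTERN_ALLGATHER every rank has all N roots as leaves (nleaves = N
     on every rank), so the Bcast below acts like an MPI_Allgather(v): afterwards
     each rank's y holds a full copy of x. */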

  /* Create the leaf vector y (seq vector) and its duplicate y2, which serves as leafupdate */
  PetscCall(PetscSFGetGraph(allgathersf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y to initialize y = [1,N], then scale y so that leafdata = y = [2,2*N] */
  PetscCall(PetscSFBcastBegin(allgathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(allgathersf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecScale(y, 2));
  PetscCall(VecGetArray(y, &leafdata));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(allgathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(allgathersf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is an allgather graph, with op = MPI_SUM we have (supposing ranks get their updates applied in ascending order)
     rank 0: leafupdate = rootdata = [1,N],   rootdata += leafdata = [3,3*N]
     rank 1: leafupdate = rootdata = [3,3*N], rootdata += leafdata = [5,5*N]
     rank 2: leafupdate = rootdata = [5,5*N], rootdata += leafdata = [7,7*N]
     ...
   */
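  /* Concretely, with N = 10, two ranks and -op sum (assuming the ascending update
     order above): rank 0 fetches leafupdate = [1,2,...,10], rank 1 fetches
     leafupdate = [3,6,...,30], and the final rootdata is [5,10,...,50]. */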
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  PetscCall(VecDestroy(&x)); /* ALLTOALL recreates x with different sizes, so destroy it here */

  PetscCall(PetscSFDestroy(&allgathersf));

  /*-------------------------------------*/
  /*       PETSCSF_PATTERN_ALLTOALL      */
  /*-------------------------------------*/

  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetSizes(x, size, PETSC_DECIDE));
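
  /* The local size is size, so the global size is size*size: each rank owns one
     entry per peer rank, matching the one-entry-per-pair exchange of an MPI_Alltoall. */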

  /* set MPI vec x to [1, 2, .., size^2] */
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i + 1.0, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* Create the alltoall SF */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting PetscSFFetchAndOp on a PETSCSF_PATTERN_ALLTOALL graph with op = %s\n", mpiopname));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &alltoallsf));
  PetscCall(PetscSFSetGraphWithPattern(alltoallsf, NULL /*insignificant*/, PETSCSF_PATTERN_ALLTOALL));
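
  /* For PETSCSF_PATTERN_ALLTOALL each rank has size roots and size leaves: root j on
     rank r is connected to leaf r on rank j, the graph of an MPI_Alltoall that sends
     one entry per rank pair. The layout argument is ignored for this pattern, hence
     the NULL. */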

  /* Create the leaf vector y (seq vector) and its duplicate y2, which serves as leafupdate */
  PetscCall(PetscSFGetGraph(alltoallsf, NULL, &nleaves, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nleaves, &y));
  PetscCall(VecDuplicate(y, &y2));

  PetscCall(VecGetArray(x, &rootdata));
  PetscCall(VecGetArray(y, &leafdata));
  PetscCall(VecGetArray(y2, &leafupdate));

  /* Bcast x to y, to initialize y = 1+rank+size*i, with i=0..size-1 */
  PetscCall(PetscSFBcastBegin(alltoallsf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(alltoallsf, MPIU_SCALAR, rootdata, leafdata, MPI_REPLACE));

  /* FetchAndOp x to y */
  PetscCall(PetscSFFetchAndOpBegin(alltoallsf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));
  PetscCall(PetscSFFetchAndOpEnd(alltoallsf, MPIU_SCALAR, rootdata, leafdata, leafupdate, op));

  /* View roots (x) and leafupdate (y2). Since this is an alltoall graph, each root has exactly one leaf.
     So leafupdate = old rootdata = 1+rank+size*i, i=0..size-1, and with op = MPI_SUM each root doubles:
     rootdata += leafdata, i.e., rootdata = [2,2*size^2]
   */
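  /* Concretely, with two ranks (global x = [1,2,3,4]): rank 0's leaves receive [1,3]
     and rank 1's receive [2,4]; those same values are fetched into leafupdate, and
     with -op sum every root doubles, giving rootdata = [2,4,6,8]. */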
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nleaves, PETSC_DECIDE, leafupdate, &gy2));
  PetscCall(PetscObjectSetName((PetscObject)x, "rootdata"));
  PetscCall(PetscObjectSetName((PetscObject)gy2, "leafupdate"));

  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(gy2, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&gy2));

  PetscCall(VecRestoreArray(y2, &leafupdate));
  PetscCall(VecDestroy(&y2));

  PetscCall(VecRestoreArray(y, &leafdata));
  PetscCall(VecDestroy(&y));

  PetscCall(VecRestoreArray(x, &rootdata));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscSFDestroy(&alltoallsf));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300
      requires: !defined(PETSC_HAVE_I_MPI)
      nsize: 2
      args: -op replace

   test:
      suffix: 2
      nsize: 2
      args: -op sum

   # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
   test:
      # MPI_Sendrecv_replace is broken with Intel MPI 20210400300
      requires: !defined(PETSC_HAVE_I_MPI)
      suffix: 3
      nsize: 3
      args: -op replace

   test:
      suffix: 4
      nsize: 3
      args: -op sum

TEST*/