static const char help[] = "Test star forest communication (PetscSF)\n\n";

/*
    Description: A star is a simple tree with one root and zero or more leaves.
    A star forest is a union of disjoint stars.
    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
    This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
*/
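
/*
  A small illustration (for exposition only, not tied to the specific graph built below): a single
  star might have its root stored on rank 0 while ranks 1 and 2 each hold a leaf referencing it;
  PetscSFBcast then copies the root value to both leaves, and PetscSFReduce combines the two leaf
  values back into the root.
*/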

/*
  Include petscsf.h so we can use PetscSF objects. Note that this automatically
  includes petscsys.h.
*/
#include <petscsf.h>
#include <petscviewer.h>

/* like PetscSFView() but with alternative array of local indices */
static PetscErrorCode PetscSFViewCustomLocals_Private(PetscSF sf, const PetscInt locals[], PetscViewer viewer)
{
  const PetscSFNode *iremote;
  PetscInt           i, nroots, nleaves, nranks;
  PetscMPIInt        rank;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)sf), &rank));
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, NULL, NULL, NULL, NULL));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Number of roots=%" PetscInt_FMT ", leaves=%" PetscInt_FMT ", remote ranks=%" PetscInt_FMT "\n", rank, nroots, nleaves, nranks));
  for (i = 0; i < nleaves; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " <- (%" PetscInt_FMT ",%" PetscInt_FMT ")\n", rank, locals[i], iremote[i].rank, iremote[i].index));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  PetscInt     i, bs = 2, nroots, nrootsalloc, nleaves, nleavesalloc, *mine, stride;
  PetscSFNode *remote;
  PetscMPIInt  rank, size;
  PetscSF      sf, vsf;
  PetscBool    test_all, test_bcast, test_bcastop, test_reduce, test_degree, test_fetchandop, test_gather, test_scatter, test_embed, test_invert, test_sf_distribute, test_char, test_vector = PETSC_FALSE;
  MPI_Op       mop = MPI_OP_NULL; /* initialize to prevent compiler warnings with cxx_quad build */
  char         opstring[256];
  PetscBool    strflg;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF Test Options", "none");
  test_all = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_all", "Test all SF communications", "", test_all, &test_all, NULL));
  test_bcast = test_all;
  PetscCall(PetscOptionsBool("-test_bcast", "Test broadcast", "", test_bcast, &test_bcast, NULL));
  test_bcastop = test_all;
  PetscCall(PetscOptionsBool("-test_bcastop", "Test broadcast and reduce", "", test_bcastop, &test_bcastop, NULL));
  test_reduce = test_all;
  PetscCall(PetscOptionsBool("-test_reduce", "Test reduction", "", test_reduce, &test_reduce, NULL));
  test_char = test_all;
  PetscCall(PetscOptionsBool("-test_char", "Test signed char, unsigned char, and char", "", test_char, &test_char, NULL));
  mop = MPI_SUM;
  PetscCall(PetscStrncpy(opstring, "sum", sizeof(opstring)));
  PetscCall(PetscOptionsString("-test_op", "Designate which MPI_Op to use", "", opstring, opstring, sizeof(opstring), NULL));
  PetscCall(PetscStrcmp("sum", opstring, &strflg));
  if (strflg) mop = MPIU_SUM;
  PetscCall(PetscStrcmp("prod", opstring, &strflg));
  if (strflg) mop = MPI_PROD;
  PetscCall(PetscStrcmp("max", opstring, &strflg));
  if (strflg) mop = MPI_MAX;
  PetscCall(PetscStrcmp("min", opstring, &strflg));
  if (strflg) mop = MPI_MIN;
  PetscCall(PetscStrcmp("land", opstring, &strflg));
  if (strflg) mop = MPI_LAND;
  PetscCall(PetscStrcmp("band", opstring, &strflg));
  if (strflg) mop = MPI_BAND;
  PetscCall(PetscStrcmp("lor", opstring, &strflg));
  if (strflg) mop = MPI_LOR;
  PetscCall(PetscStrcmp("bor", opstring, &strflg));
  if (strflg) mop = MPI_BOR;
  PetscCall(PetscStrcmp("lxor", opstring, &strflg));
  if (strflg) mop = MPI_LXOR;
  PetscCall(PetscStrcmp("bxor", opstring, &strflg));
  if (strflg) mop = MPI_BXOR;
  test_degree = test_all;
  PetscCall(PetscOptionsBool("-test_degree", "Test computation of vertex degree", "", test_degree, &test_degree, NULL));
  test_fetchandop = test_all;
  PetscCall(PetscOptionsBool("-test_fetchandop", "Test atomic Fetch-And-Op", "", test_fetchandop, &test_fetchandop, NULL));
  test_gather = test_all;
  PetscCall(PetscOptionsBool("-test_gather", "Test point gather", "", test_gather, &test_gather, NULL));
  test_scatter = test_all;
  PetscCall(PetscOptionsBool("-test_scatter", "Test point scatter", "", test_scatter, &test_scatter, NULL));
  test_embed = test_all;
  PetscCall(PetscOptionsBool("-test_embed", "Test point embed", "", test_embed, &test_embed, NULL));
  test_invert = test_all;
  PetscCall(PetscOptionsBool("-test_invert", "Test point invert", "", test_invert, &test_invert, NULL));
  stride = 1;
  PetscCall(PetscOptionsInt("-stride", "Stride for leaf and root data", "", stride, &stride, NULL));
  test_sf_distribute = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_sf_distribute", "Create an SF that 'distributes' to each process, like an alltoall", "", test_sf_distribute, &test_sf_distribute, NULL));
  PetscCall(PetscOptionsInt("-bs", "Block size for vectorial SF", "", bs, &bs, NULL));
  PetscCall(PetscOptionsBool("-test_vector", "Run tests using the vectorial SF", "", test_vector, &test_vector, NULL));
  PetscOptionsEnd();

  if (test_sf_distribute) {
    nroots       = size;
    nrootsalloc  = size;
    nleaves      = size;
    nleavesalloc = size;
    mine         = NULL;
    PetscCall(PetscMalloc1(nleaves, &remote));
    for (i = 0; i < size; i++) {
      remote[i].rank  = i;
      remote[i].index = rank;
    }
  } else {
    nroots       = 2 + (PetscInt)(rank == 0);
    nrootsalloc  = nroots * stride;
    nleaves      = 2 + (PetscInt)(rank > 0);
    nleavesalloc = nleaves * stride;
    mine         = NULL;
    if (stride > 1) {
      PetscInt i;

      PetscCall(PetscMalloc1(nleaves, &mine));
      for (i = 0; i < nleaves; i++) mine[i] = stride * i;
    }
    PetscCall(PetscMalloc1(nleaves, &remote));
    /* Left periodic neighbor */
    remote[0].rank  = (rank + size - 1) % size;
    remote[0].index = 1 * stride;
    /* Right periodic neighbor */
    remote[1].rank  = (rank + 1) % size;
    remote[1].index = 0 * stride;
    if (rank > 0) { /* Ranks > 0 also reference root 2 on rank 0 */
      remote[2].rank  = 0;
      remote[2].index = 2 * stride;
    }
  }
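
  /*
     For illustration (assuming 3 ranks and the default -stride 1): rank 0 owns roots 0,1,2 while the
     other ranks own roots 0,1; every rank has a leaf referencing root 1 on its left periodic neighbor
     and root 0 on its right periodic neighbor, and ranks 1 and 2 additionally have a leaf referencing
     root 2 on rank 0.
  */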

  /* Create a star forest for communication */
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFSetGraph(sf, nrootsalloc, nleaves, mine, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sf));
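  /* Because 'mine' and 'remote' were passed with PETSC_OWN_POINTER, the SF now owns those arrays and
     will free them when it is destroyed; they must not be freed here. */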

  /* We can also obtain a strided SF to communicate interleaved blocks of data */
  PetscCall(PetscSFCreateStridedSF(sf, bs, PETSC_DECIDE, PETSC_DECIDE, &vsf));
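  /* The vectorial SF communicates bs values per edge of the original graph, so when it is used below
     the root and leaf buffer sizes are scaled by bs (see PetscSFCreateStridedSF for how the
     PETSC_DECIDE leading dimensions determine the data layout). */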
  if (test_vector) { /* perform all tests below using the vectorial SF */
    PetscSF t = sf;

    sf  = vsf;
    vsf = t;

    nroots *= bs;
    nleaves *= bs;
    nrootsalloc *= bs;
    nleavesalloc *= bs;
  }
  PetscCall(PetscSFDestroy(&vsf));

  /* View graph, mostly useful for debugging purposes. */
  PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL));
  PetscCall(PetscSFView(sf, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));

  if (test_bcast) { /* broadcast rootdata into leafdata */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Initialize the leaf buffer; its initial values do not affect the broadcast. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_bcast && test_char) { /* Bcast with char */
    PetscInt len;
    char     buf[256];
    char    *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = '*';
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 'A' + rank * 3 + i; /* rank is small, so the result still fits in a char */
    /* Initialize the leaf buffer; its initial values do not affect the broadcast. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = '?';

    PetscCall(PetscSFBcastBegin(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata in type of char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5c", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata in type of char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5c", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_bcastop) { /* Broadcast rootdata and combine it into leafdata with the selected MPI_Op */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce with */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -10 * (rank + 1) - i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-BcastAndOp Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, mop));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce) { /* Reduce leafdata into rootdata */
    PetscInt *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1000 * (rank + 1) + 10 * i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    /* Perform the reduction. Computation or other communication can be performed between the begin and end calls.
     * This example sums the values, but other MPI_Ops can be used (e.g. MPI_MAX, MPI_PROD). */
    PetscCall(PetscSFReduceBegin(sf, MPIU_INT, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPIU_INT, leafdata, rootdata, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce && test_char) { /* Reduce with signed char */
    PetscInt     len;
    char         buf[256];
    signed char *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 10 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 50 * (rank + 1) + 10 * i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of signed char\n"));

    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    /* We use MPI_SIGNED_CHAR here; using MPI_CHAR would trigger an error since the MPI standard does
       not support reductions on MPI_CHAR. Testing with -test_op max shows that the sign is honored by
       MPI_MAX.
     */
    PetscCall(PetscSFReduceBegin(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of signed char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of signed char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce && test_char) { /* Reduce with unsigned char */
    PetscInt       len;
    char           buf[256];
    unsigned char *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = 0;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 10 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = 0;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 50 * (rank + 1) + 10 * i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of unsigned char\n"));

    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    /* We use MPI_UNSIGNED_CHAR here; using MPI_CHAR would trigger an error since the MPI standard does
       not support reductions on MPI_CHAR. With -test_op max, the comparison is done on unsigned values.
     */
    PetscCall(PetscSFReduceBegin(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of unsigned char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of unsigned char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_degree) {
    const PetscInt *degree;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
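    /* degree has one entry per allocated root slot; degree[i] is the number of leaves, across all
       processes, that reference root i (0 for the padding slots introduced by -stride). */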
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Root degrees\n"));
    PetscCall(PetscIntView(nrootsalloc, degree, PETSC_VIEWER_STDOUT_WORLD));
  }

  if (test_fetchandop) {
    /* Cannot use text compare here because token ordering is not deterministic */
    PetscInt *leafdata, *leafupdate, *rootdata;
    PetscCall(PetscMalloc3(nleavesalloc, &leafdata, nleavesalloc, &leafupdate, nrootsalloc, &rootdata));
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1;
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 0;
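    /* Fetch-and-op: each leaf atomically combines its value into the root with 'mop' and receives in
       leafupdate the root value as it was just before that update; with the default sum, rootdata ends
       up counting the leaves while leafupdate records each leaf's position in the (nondeterministic)
       update order. */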
    PetscCall(PetscSFFetchAndOpBegin(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop));
    PetscCall(PetscSFFetchAndOpEnd(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Rootdata (sum of 1 from each leaf)\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Leafupdate (value at roots prior to my atomic update)\n"));
    PetscCall(PetscIntView(nleavesalloc, leafupdate, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree3(leafdata, leafupdate, rootdata));
  }

  if (test_gather) {
    const PetscInt *degree;
    PetscInt        inedges, *indata, *outdata;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
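    /* indata lives in the "multi-root" space: root i gets degree[i] consecutive slots, so its total
       length is the number of incoming edges counted above. */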
    PetscCall(PetscMalloc2(inedges, &indata, nleavesalloc, &outdata));
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0; i < nleaves; i++) outdata[i * stride] = 1000 * (rank + 1) + i;
    PetscCall(PetscSFGatherBegin(sf, MPIU_INT, outdata, indata));
    PetscCall(PetscSFGatherEnd(sf, MPIU_INT, outdata, indata));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Gathered data at multi-roots from leaves\n"));
    PetscCall(PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(indata, outdata));
  }

  if (test_scatter) {
    const PetscInt *degree;
    PetscInt        j, count, inedges, *indata, *outdata;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
    PetscCall(PetscMalloc2(inedges, &indata, nleavesalloc, &outdata));
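    /* Fill one value per incoming edge; the degree[i] slots belonging to root i are consecutive in
       indata (the same multi-root layout used by the gather test above). */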
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0, count = 0; i < nrootsalloc; i++) {
      for (j = 0; j < degree[i]; j++) indata[count++] = 1000 * (rank + 1) + 100 * i + j;
    }
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Data at multi-roots, to scatter to leaves\n"));
    PetscCall(PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(PetscSFScatterBegin(sf, MPIU_INT, indata, outdata));
    PetscCall(PetscSFScatterEnd(sf, MPIU_INT, indata, outdata));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Scattered data at leaves\n"));
    PetscCall(PetscIntView(nleavesalloc, outdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(indata, outdata));
  }

  if (test_embed) {
    const PetscInt nroots = 1 + (PetscInt)(rank == 0);
    PetscInt       selected[2];
    PetscSF        esf;

    selected[0] = stride;
    selected[1] = 2 * stride;
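    /* The embedded SF keeps only the edges whose roots appear in selected[] (root 'stride' on every
       rank, plus root '2 * stride' on rank 0); leaves that reference other roots are dropped. */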
    PetscCall(PetscSFCreateEmbeddedRootSF(sf, nroots, selected, &esf));
    PetscCall(PetscSFSetUp(esf));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Embedded PetscSF\n"));
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL));
    PetscCall(PetscSFView(esf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSFDestroy(&esf));
  }

  if (test_invert) {
    const PetscInt *degree;
    PetscInt       *mRootsOrigNumbering;
    PetscInt        inedges;
    PetscSF         msf, imsf;

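    /* The multi-SF expands each root into one slot per incoming edge (this is what gather/scatter use
       to avoid combining values), and the inverse SF swaps the roles of roots and leaves. */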
    PetscCall(PetscSFGetMultiSF(sf, &msf));
    PetscCall(PetscSFCreateInverseSF(msf, &imsf));
    PetscCall(PetscSFSetUp(msf));
    PetscCall(PetscSFSetUp(imsf));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF\n"));
    PetscCall(PetscSFView(msf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF roots indices in original SF roots numbering\n"));
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, degree, &inedges, &mRootsOrigNumbering));
    PetscCall(PetscIntView(inedges, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF\n"));
    PetscCall(PetscSFView(imsf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF, original numbering\n"));
    PetscCall(PetscSFViewCustomLocals_Private(imsf, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSFDestroy(&imsf));
    PetscCall(PetscFree(mRootsOrigNumbering));
  }

  /* Clean storage for star forest. */
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_reduce -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2_basic
      nsize: 4
      args: -test_reduce -sf_type basic

   test:
      suffix: 3
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_degree -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 3_basic
      nsize: 4
      args: -test_degree -sf_type basic

   test:
      suffix: 4
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_gather -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 4_basic
      nsize: 4
      args: -test_gather -sf_type basic

   test:
      suffix: 4_stride
      nsize: 4
      args: -test_gather -sf_type basic -stride 2

   test:
      suffix: 5
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_scatter -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 5_basic
      nsize: 4
      args: -test_scatter -sf_type basic

   test:
      suffix: 5_stride
      nsize: 4
      args: -test_scatter -sf_type basic -stride 2

   test:
      suffix: 6
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      # No -sf_window_flavor dynamic due to bug https://gitlab.com/petsc/petsc/issues/555
      args: -test_embed -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 6_basic
      nsize: 4
      args: -test_embed -sf_type basic

   test:
      suffix: 7
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_invert -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 7_basic
      nsize: 4
      args: -test_invert -sf_type basic

   test:
      suffix: basic
      nsize: 4
      args: -test_bcast -sf_type basic
      output_file: output/ex1_1_basic.out

   test:
      suffix: bcastop_basic
      nsize: 4
      args: -test_bcastop -sf_type basic
      output_file: output/ex1_bcastop_basic.out

   test:
      suffix: 8
      nsize: 3
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -test_sf_distribute -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 8_basic
      nsize: 3
      args: -test_bcast -test_sf_distribute -sf_type basic

   test:
      suffix: 9_char
      nsize: 4
      args: -sf_type basic -test_bcast -test_reduce -test_op max -test_char

   # Here we do not test -sf_window_flavor dynamic since it is designed for repeated SFs with few different rootdata pointers
   test:
      suffix: 10
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}} -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
   test:
      suffix: 10_shared
      output_file: output/ex1_10.out
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 10_basic
      nsize: 4
      args: -sf_type basic -test_all -test_bcastop 0 -test_fetchandop 0

   test:
      suffix: 10_basic_vector
      nsize: 4
      args: -sf_type basic -test_all -test_bcastop 0 -test_fetchandop 0 -test_vector

TEST*/
639