static const char help[] = "Test star forest communication (PetscSF)\n\n";

/*
    Description: A star is a simple tree with one root and zero or more leaves.
    A star forest is a union of disjoint stars.
    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
    This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
*/
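
/*
  Typical invocations (taken from the TEST block at the end of this file), for example:

      mpiexec -n 4 ./ex1 -test_bcast -sf_type basic
      mpiexec -n 4 ./ex1 -test_reduce -sf_type basic
      mpiexec -n 4 ./ex1 -sf_type basic -test_bcast -test_reduce -test_op max -test_char
*/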

/*
  Include petscsf.h so we can use PetscSF objects. Note that this automatically
  includes petscsys.h.
*/
#include <petscsf.h>
#include <petscviewer.h>

/* like PetscSFView() but with alternative array of local indices */
static PetscErrorCode PetscSFViewCustomLocals_Private(PetscSF sf, const PetscInt locals[], PetscViewer viewer)
{
  const PetscSFNode *iremote;
  PetscInt           i, nroots, nleaves;
  PetscMPIInt        rank, nranks;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)sf), &rank));
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, NULL, NULL, NULL, NULL));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Number of roots=%" PetscInt_FMT ", leaves=%" PetscInt_FMT ", remote ranks=%d\n", rank, nroots, nleaves, nranks));
  for (i = 0; i < nleaves; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " <- (%d,%" PetscInt_FMT ")\n", rank, locals[i], (PetscMPIInt)iremote[i].rank, iremote[i].index));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  PetscInt     i, bs = 2, nroots, nrootsalloc, nleaves, nleavesalloc, *mine, stride;
  PetscSFNode *remote;
  PetscMPIInt  rank, size;
  PetscSF      sf, vsf;
  PetscBool    test_all, test_bcast, test_bcastop, test_reduce, test_degree, test_fetchandop, test_gather, test_scatter, test_embed, test_invert, test_sf_distribute, test_char, test_vector = PETSC_FALSE;
  MPI_Op       mop = MPI_OP_NULL; /* initialize to prevent compiler warnings with cxx_quad build */
  char         opstring[256];
  PetscBool    strflg;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF Test Options", "none");
  test_all = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_all", "Test all SF communications", "", test_all, &test_all, NULL));
  test_bcast = test_all;
  PetscCall(PetscOptionsBool("-test_bcast", "Test broadcast", "", test_bcast, &test_bcast, NULL));
  test_bcastop = test_all;
  PetscCall(PetscOptionsBool("-test_bcastop", "Test broadcast and reduce", "", test_bcastop, &test_bcastop, NULL));
  test_reduce = test_all;
  PetscCall(PetscOptionsBool("-test_reduce", "Test reduction", "", test_reduce, &test_reduce, NULL));
  test_char = test_all;
  PetscCall(PetscOptionsBool("-test_char", "Test signed char, unsigned char, and char", "", test_char, &test_char, NULL));
  mop = MPI_SUM;
  PetscCall(PetscStrncpy(opstring, "sum", sizeof(opstring)));
  PetscCall(PetscOptionsString("-test_op", "Designate which MPI_Op to use", "", opstring, opstring, sizeof(opstring), NULL));
  PetscCall(PetscStrcmp("sum", opstring, &strflg));
  if (strflg) mop = MPIU_SUM;
  PetscCall(PetscStrcmp("prod", opstring, &strflg));
  if (strflg) mop = MPI_PROD;
  PetscCall(PetscStrcmp("max", opstring, &strflg));
  if (strflg) mop = MPI_MAX;
  PetscCall(PetscStrcmp("min", opstring, &strflg));
  if (strflg) mop = MPI_MIN;
  PetscCall(PetscStrcmp("land", opstring, &strflg));
  if (strflg) mop = MPI_LAND;
  PetscCall(PetscStrcmp("band", opstring, &strflg));
  if (strflg) mop = MPI_BAND;
  PetscCall(PetscStrcmp("lor", opstring, &strflg));
  if (strflg) mop = MPI_LOR;
  PetscCall(PetscStrcmp("bor", opstring, &strflg));
  if (strflg) mop = MPI_BOR;
  PetscCall(PetscStrcmp("lxor", opstring, &strflg));
  if (strflg) mop = MPI_LXOR;
  PetscCall(PetscStrcmp("bxor", opstring, &strflg));
  if (strflg) mop = MPI_BXOR;
  test_degree = test_all;
  PetscCall(PetscOptionsBool("-test_degree", "Test computation of vertex degree", "", test_degree, &test_degree, NULL));
  test_fetchandop = test_all;
  PetscCall(PetscOptionsBool("-test_fetchandop", "Test atomic Fetch-And-Op", "", test_fetchandop, &test_fetchandop, NULL));
  test_gather = test_all;
  PetscCall(PetscOptionsBool("-test_gather", "Test point gather", "", test_gather, &test_gather, NULL));
  test_scatter = test_all;
  PetscCall(PetscOptionsBool("-test_scatter", "Test point scatter", "", test_scatter, &test_scatter, NULL));
  test_embed = test_all;
  PetscCall(PetscOptionsBool("-test_embed", "Test point embed", "", test_embed, &test_embed, NULL));
  test_invert = test_all;
  PetscCall(PetscOptionsBool("-test_invert", "Test point invert", "", test_invert, &test_invert, NULL));
  stride = 1;
  PetscCall(PetscOptionsInt("-stride", "Stride for leaf and root data", "", stride, &stride, NULL));
  test_sf_distribute = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_sf_distribute", "Create an SF that 'distributes' to each process, like an alltoall", "", test_sf_distribute, &test_sf_distribute, NULL));
  PetscCall(PetscOptionsInt("-bs", "Block size for vectorial SF", "", bs, &bs, NULL));
  PetscCall(PetscOptionsBool("-test_vector", "Run tests using the vectorial SF", "", test_vector, &test_vector, NULL));
  PetscOptionsEnd();

  if (test_sf_distribute) {
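    /* Local leaf i references root index <this rank> on process i, so a broadcast on this SF moves one
       entry from every process to every other process, much like an MPI_Alltoall. */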
    nroots       = size;
    nrootsalloc  = size;
    nleaves      = size;
    nleavesalloc = size;
    mine         = NULL;
    PetscCall(PetscMalloc1(nleaves, &remote));
    for (PetscMPIInt ii = 0; ii < size; ii++) {
      remote[ii].rank  = ii;
      remote[ii].index = rank;
    }
  } else {
    nroots       = 2 + (PetscInt)(rank == 0);
    nrootsalloc  = nroots * stride;
    nleaves      = 2 + (PetscInt)(rank > 0);
    nleavesalloc = nleaves * stride;
    mine         = NULL;
    if (stride > 1) {
      PetscInt i;

      PetscCall(PetscMalloc1(nleaves, &mine));
      for (i = 0; i < nleaves; i++) mine[i] = stride * i;
    }
    PetscCall(PetscMalloc1(nleaves, &remote));
    /* Left periodic neighbor */
    remote[0].rank  = (rank + size - 1) % size;
    remote[0].index = 1 * stride;
    /* Right periodic neighbor */
    remote[1].rank  = (rank + 1) % size;
    remote[1].index = 0 * stride;
    if (rank > 0) { /* All processes with rank > 0 also reference rank 0, root index 2 (times stride) */
      remote[2].rank  = 0;
      remote[2].index = 2 * stride;
    }
  }

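  /* A rough sketch of the default graph (without -test_sf_distribute), for stride 1: every process owns
     roots 0 and 1 (rank 0 additionally owns root 2); leaf 0 references root 1 on the left periodic
     neighbor and leaf 1 references root 0 on the right periodic neighbor, and every process with rank > 0
     has a third leaf referencing root 2 on rank 0, so that root is shared by size-1 leaves. */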
  /* Create a star forest for communication */
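  /* Both index arrays are handed to the SF with PETSC_OWN_POINTER, so the SF takes ownership of mine and
     remote and frees them when it is destroyed; the caller must not free or modify them afterwards. */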
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFSetGraph(sf, nrootsalloc, nleaves, mine, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sf));

  /* We can also obtain a strided SF to communicate interleaved blocks of data */
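  /* In the vectorial SF each root/leaf of sf represents a block of bs values, which is why nroots, nleaves
     and the allocated sizes are scaled by bs below when -test_vector is given. */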
  PetscCall(PetscSFCreateStridedSF(sf, bs, PETSC_DECIDE, PETSC_DECIDE, &vsf));
  if (test_vector) { /* perform all tests below using the vectorial SF */
    PetscSF t = sf;

    sf  = vsf;
    vsf = t;

    nroots *= bs;
    nleaves *= bs;
    nrootsalloc *= bs;
    nleavesalloc *= bs;
  }
  PetscCall(PetscSFDestroy(&vsf));

  /* View graph, mostly useful for debugging purposes. */
  PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL));
  PetscCall(PetscSFView(sf, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));

  if (test_bcast) { /* broadcast rootdata into leafdata */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Initialize local buffer, these values are never used. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    // Test the persistent communication code paths by repeating the bcast several times
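    // Registering the (rootdata, leafdata) pair tells the SF that these buffers will be reused for repeated
    // communication with this datatype, so it can set up and cache internal resources once; the matching
    // PetscSFDeregisterPersistent() call below releases them before the buffers are freed.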
    PetscCall(PetscSFRegisterPersistent(sf, MPIU_INT, rootdata, leafdata));
    for (PetscInt i = 0; i < 10; i++) {
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    }
    PetscCall(PetscSFDeregisterPersistent(sf, MPIU_INT, rootdata, leafdata));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_bcast && test_char) { /* Bcast with char */
    PetscInt len;
    char     buf[256];
    char    *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = '*';
    for (i = 0; i < nroots; i++) rootdata[i * stride] = (char)('A' + rank * 3 + i); /* rank is very small, so it is fine to compute a char */
    /* Initialize local buffer, these values are never used. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = '?';

    PetscCall(PetscSFBcastBegin(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata in type of char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5c", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata in type of char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5c", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_bcastop) { /* Reduce rootdata into leafdata */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce with */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -10 * (rank + 1) - i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-BcastAndOp Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, mop));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce) { /* Reduce leafdata into rootdata */
    PetscInt *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1000 * (rank + 1) + 10 * i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    /* Perform reduction. Computation or other communication can be performed between the begin and end calls.
     * This example sums the values, but other MPI_Ops can be used (e.g. MPI_MAX, MPI_PROD). */
    PetscCall(PetscSFReduceBegin(sf, MPIU_INT, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPIU_INT, leafdata, rootdata, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce && test_char) { /* Reduce with signed char */
    PetscInt     len;
    char         buf[256];
    signed char *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = (signed char)(10 * (rank + 1) + i);
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = (signed char)(50 * (rank + 1) + 10 * i);
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of signed char\n"));

    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    /* Using MPI_CHAR here would trigger an error since the MPI standard does not support reductions on MPI_CHAR.
       Testing with -test_op max, one can see that the sign is taken into account by MPI_MAX.
     */
    PetscCall(PetscSFReduceBegin(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of signed char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of signed char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce && test_char) { /* Reduce with unsigned char */
    PetscInt       len;
    char           buf[256];
    unsigned char *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = 0;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = (unsigned char)(10 * (rank + 1) + i);
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = 0;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = (unsigned char)(50 * (rank + 1) + 10 * i);
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of unsigned char\n"));

    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    /* As above, MPI_CHAR cannot be used since the MPI standard does not support reductions on MPI_CHAR.
       Here the data is communicated as MPI_UNSIGNED_CHAR, so with -test_op max all values compare as non-negative numbers.
     */
    PetscCall(PetscSFReduceBegin(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of unsigned char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of unsigned char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_degree) {
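    /* The degree of a root is the number of leaves, across all processes, that reference it; the returned
       array has one entry per root (nrootsalloc here) and is owned by the SF, hence it is not freed below. */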
    const PetscInt *degree;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Root degrees\n"));
    PetscCall(PetscIntView(nrootsalloc, degree, PETSC_VIEWER_STDOUT_WORLD));
  }

  if (test_fetchandop) {
    /* Cannot use text compare here because token ordering is not deterministic */
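    /* Fetch-and-op atomically combines each leaf value into its root using mop and returns in leafupdate
       the value the root held just before that leaf's contribution was applied. */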
    PetscInt *leafdata, *leafupdate, *rootdata;
    PetscCall(PetscMalloc3(nleavesalloc, &leafdata, nleavesalloc, &leafupdate, nrootsalloc, &rootdata));
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1;
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 0;
    PetscCall(PetscSFFetchAndOpBegin(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop));
    PetscCall(PetscSFFetchAndOpEnd(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Rootdata (sum of 1 from each leaf)\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Leafupdate (value at roots prior to my atomic update)\n"));
    PetscCall(PetscIntView(nleavesalloc, leafupdate, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree3(leafdata, leafupdate, rootdata));
  }

  if (test_gather) {
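    /* Gather collects the value of every leaf into a "multi-root" space on the owner of the corresponding
       root: a root of degree d receives d entries, so the receive buffer needs sum_i degree[i] (= inedges) slots. */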
    const PetscInt *degree;
    PetscInt        inedges, *indata, *outdata;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
    PetscCall(PetscMalloc2(inedges, &indata, nleavesalloc, &outdata));
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0; i < nleaves; i++) outdata[i * stride] = 1000 * (rank + 1) + i;
    PetscCall(PetscSFGatherBegin(sf, MPIU_INT, outdata, indata));
    PetscCall(PetscSFGatherEnd(sf, MPIU_INT, outdata, indata));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Gathered data at multi-roots from leaves\n"));
    PetscCall(PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(indata, outdata));
  }

  if (test_scatter) {
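    /* Scatter is the reverse of gather: each of the degree[i] multi-root entries attached to root i is sent
       to exactly one of that root's leaves. */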
    const PetscInt *degree;
    PetscInt        j, count, inedges, *indata, *outdata;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
    PetscCall(PetscMalloc2(inedges, &indata, nleavesalloc, &outdata));
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0, count = 0; i < nrootsalloc; i++) {
      for (j = 0; j < degree[i]; j++) indata[count++] = 1000 * (rank + 1) + 100 * i + j;
    }
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Data at multi-roots, to scatter to leaves\n"));
    PetscCall(PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(PetscSFScatterBegin(sf, MPIU_INT, indata, outdata));
    PetscCall(PetscSFScatterEnd(sf, MPIU_INT, indata, outdata));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Scattered data at leaves\n"));
    PetscCall(PetscIntView(nleavesalloc, outdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(indata, outdata));
  }

  if (test_embed) {
    const PetscInt nroots = 1 + (PetscInt)(rank == 0);
    PetscInt       selected[2];
    PetscSF        esf;

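    /* PetscSFCreateEmbeddedRootSF() builds a new SF that keeps only the edges whose root index appears in
       selected[]; leaves referencing any other root are dropped. Here roots stride and 2*stride are kept. */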
    selected[0] = stride;
    selected[1] = 2 * stride;
    PetscCall(PetscSFCreateEmbeddedRootSF(sf, nroots, selected, &esf));
    PetscCall(PetscSFSetUp(esf));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Embedded PetscSF\n"));
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL));
    PetscCall(PetscSFView(esf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSFDestroy(&esf));
  }

  if (test_invert) {
    const PetscInt *degree;
    PetscInt       *mRootsOrigNumbering;
    PetscInt        inedges;
    PetscSF         msf, imsf;

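    /* The multi-SF replaces each root of degree d with d distinct multi-roots (one per incoming leaf), so
       every leaf has a unique root; PetscSFCreateInverseSF() then builds the SF with the roles of roots and
       leaves exchanged. */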
    PetscCall(PetscSFGetMultiSF(sf, &msf));
    PetscCall(PetscSFCreateInverseSF(msf, &imsf));
    PetscCall(PetscSFSetUp(msf));
    PetscCall(PetscSFSetUp(imsf));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF\n"));
    PetscCall(PetscSFView(msf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF roots indices in original SF roots numbering\n"));
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, degree, &inedges, &mRootsOrigNumbering));
    PetscCall(PetscIntView(inedges, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF\n"));
    PetscCall(PetscSFView(imsf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF, original numbering\n"));
    PetscCall(PetscSFViewCustomLocals_Private(imsf, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSFDestroy(&imsf));
    PetscCall(PetscFree(mRootsOrigNumbering));
  }

  /* Clean storage for star forest. */
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_reduce -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2_basic
      nsize: 4
      args: -test_reduce -sf_type basic

   test:
      suffix: 3
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_degree -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 3_basic
      nsize: 4
      args: -test_degree -sf_type basic

   test:
      suffix: 4
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_gather -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 4_basic
      nsize: 4
      args: -test_gather -sf_type basic

   test:
      suffix: 4_stride
      nsize: 4
      args: -test_gather -sf_type basic -stride 2

   test:
      suffix: 5
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_scatter -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 5_basic
      nsize: 4
      args: -test_scatter -sf_type basic

   test:
      suffix: 5_stride
      nsize: 4
      args: -test_scatter -sf_type basic -stride 2

   test:
      suffix: 6
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      # No -sf_window_flavor dynamic due to bug https://gitlab.com/petsc/petsc/issues/555
      args: -test_embed -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 6_basic
      nsize: 4
      args: -test_embed -sf_type basic

   test:
      suffix: 7
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_invert -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 7_basic
      nsize: 4
      args: -test_invert -sf_type basic

   test:
      suffix: basic
      nsize: 4
      args: -test_bcast -sf_type basic
      output_file: output/ex1_1_basic.out

   test:
      suffix: bcastop_basic
      nsize: 4
      args: -test_bcastop -sf_type basic
      output_file: output/ex1_bcastop_basic.out

   test:
      suffix: 8
      nsize: 3
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -test_sf_distribute -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 8_basic
      nsize: 3
      args: -test_bcast -test_sf_distribute -sf_type basic

   test:
      suffix: 9_char
      nsize: 4
      args: -sf_type basic -test_bcast -test_reduce -test_op max -test_char

   # Here we do not test -sf_window_flavor dynamic since it is designed for repeated SFs with few different rootdata pointers
   test:
      suffix: 10
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}} -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
   test:
      suffix: 10_shared
      output_file: output/ex1_10.out
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 10_basic
      nsize: 4
      args: -sf_type basic -test_all -test_bcastop 0 -test_fetchandop 0

   test:
      suffix: 10_basic_vector
      nsize: 4
      args: -sf_type basic -test_all -test_bcastop 0 -test_fetchandop 0 -test_vector

TEST*/