static const char help[] = "Test star forest communication (PetscSF)\n\n";

/*
    Description: A star is a simple tree with one root and zero or more leaves.
    A star forest is a union of disjoint stars.
    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
    This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
*/
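
/*
  For reference, the default graph built below (-stride 1, without -test_sf_distribute) is:
  every rank owns roots 0 and 1 (rank 0 additionally owns root 2), and the leaves are connected as

    leaf 0 <- (rank-1 mod size, root 1)   left periodic neighbor
    leaf 1 <- (rank+1 mod size, root 0)   right periodic neighbor
    leaf 2 <- (rank 0, root 2)            on ranks > 0 only
*/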

/*
  Include petscsf.h so we can use PetscSF objects. Note that this automatically
  includes petscsys.h.
*/
#include <petscsf.h>
#include <petscviewer.h>

/* like PetscSFView() but with alternative array of local indices */
static PetscErrorCode PetscSFViewCustomLocals_Private(PetscSF sf, const PetscInt locals[], PetscViewer viewer)
{
  const PetscSFNode *iremote;
  PetscInt           i, nroots, nleaves, nranks;
  PetscMPIInt        rank;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)sf), &rank));
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, NULL, NULL, NULL, NULL));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Number of roots=%" PetscInt_FMT ", leaves=%" PetscInt_FMT ", remote ranks=%" PetscInt_FMT "\n", rank, nroots, nleaves, nranks));
  for (i = 0; i < nleaves; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " <- (%" PetscInt_FMT ",%" PetscInt_FMT ")\n", rank, locals[i], iremote[i].rank, iremote[i].index));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  PetscInt     i, nroots, nrootsalloc, nleaves, nleavesalloc, *mine, stride;
  PetscSFNode *remote;
  PetscMPIInt  rank, size;
  PetscSF      sf;
  PetscBool    test_all, test_bcast, test_bcastop, test_reduce, test_degree, test_fetchandop, test_gather, test_scatter, test_embed, test_invert, test_sf_distribute, test_char;
  MPI_Op       mop = MPI_OP_NULL; /* initialize to prevent compiler warnings with cxx_quad build */
  char         opstring[256];
  PetscBool    strflg;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF Test Options", "none");
  test_all = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_all", "Test all SF communications", "", test_all, &test_all, NULL));
  test_bcast = test_all;
  PetscCall(PetscOptionsBool("-test_bcast", "Test broadcast", "", test_bcast, &test_bcast, NULL));
  test_bcastop = test_all;
  PetscCall(PetscOptionsBool("-test_bcastop", "Test broadcast and reduce", "", test_bcastop, &test_bcastop, NULL));
  test_reduce = test_all;
  PetscCall(PetscOptionsBool("-test_reduce", "Test reduction", "", test_reduce, &test_reduce, NULL));
  test_char = test_all;
  PetscCall(PetscOptionsBool("-test_char", "Test signed char, unsigned char, and char", "", test_char, &test_char, NULL));
  mop = MPI_SUM;
  PetscCall(PetscStrcpy(opstring, "sum"));
  PetscCall(PetscOptionsString("-test_op", "Designate which MPI_Op to use", "", opstring, opstring, sizeof(opstring), NULL));
  PetscCall(PetscStrcmp("sum", opstring, &strflg));
  if (strflg) mop = MPIU_SUM;
  PetscCall(PetscStrcmp("prod", opstring, &strflg));
  if (strflg) mop = MPI_PROD;
  PetscCall(PetscStrcmp("max", opstring, &strflg));
  if (strflg) mop = MPI_MAX;
  PetscCall(PetscStrcmp("min", opstring, &strflg));
  if (strflg) mop = MPI_MIN;
  PetscCall(PetscStrcmp("land", opstring, &strflg));
  if (strflg) mop = MPI_LAND;
  PetscCall(PetscStrcmp("band", opstring, &strflg));
  if (strflg) mop = MPI_BAND;
  PetscCall(PetscStrcmp("lor", opstring, &strflg));
  if (strflg) mop = MPI_LOR;
  PetscCall(PetscStrcmp("bor", opstring, &strflg));
  if (strflg) mop = MPI_BOR;
  PetscCall(PetscStrcmp("lxor", opstring, &strflg));
  if (strflg) mop = MPI_LXOR;
  PetscCall(PetscStrcmp("bxor", opstring, &strflg));
  if (strflg) mop = MPI_BXOR;
  test_degree = test_all;
  PetscCall(PetscOptionsBool("-test_degree", "Test computation of vertex degree", "", test_degree, &test_degree, NULL));
  test_fetchandop = test_all;
  PetscCall(PetscOptionsBool("-test_fetchandop", "Test atomic Fetch-And-Op", "", test_fetchandop, &test_fetchandop, NULL));
  test_gather = test_all;
  PetscCall(PetscOptionsBool("-test_gather", "Test point gather", "", test_gather, &test_gather, NULL));
  test_scatter = test_all;
  PetscCall(PetscOptionsBool("-test_scatter", "Test point scatter", "", test_scatter, &test_scatter, NULL));
  test_embed = test_all;
  PetscCall(PetscOptionsBool("-test_embed", "Test point embed", "", test_embed, &test_embed, NULL));
  test_invert = test_all;
  PetscCall(PetscOptionsBool("-test_invert", "Test point invert", "", test_invert, &test_invert, NULL));
  stride = 1;
  PetscCall(PetscOptionsInt("-stride", "Stride for leaf and root data", "", stride, &stride, NULL));
  test_sf_distribute = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_sf_distribute", "Create an SF that 'distributes' to each process, like an alltoall", "", test_sf_distribute, &test_sf_distribute, NULL));
  PetscOptionsEnd();

  if (test_sf_distribute) {
    nroots       = size;
    nrootsalloc  = size;
    nleaves      = size;
    nleavesalloc = size;
    mine         = NULL;
    PetscCall(PetscMalloc1(nleaves, &remote));
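    /* Leaf i on this rank references root index 'rank' on process i, so a broadcast acts like an MPI_Alltoall of the rootdata */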
    for (i = 0; i < size; i++) {
      remote[i].rank  = i;
      remote[i].index = rank;
    }
  } else {
    nroots       = 2 + (PetscInt)(rank == 0);
    nrootsalloc  = nroots * stride;
    nleaves      = 2 + (PetscInt)(rank > 0);
    nleavesalloc = nleaves * stride;
    mine         = NULL;
    if (stride > 1) {
      PetscInt i;

      PetscCall(PetscMalloc1(nleaves, &mine));
      for (i = 0; i < nleaves; i++) mine[i] = stride * i;
    }
    PetscCall(PetscMalloc1(nleaves, &remote));
    /* Left periodic neighbor */
    remote[0].rank  = (rank + size - 1) % size;
    remote[0].index = 1 * stride;
    /* Right periodic neighbor */
    remote[1].rank  = (rank + 1) % size;
    remote[1].index = 0 * stride;
    if (rank > 0) { /* Ranks other than 0 also reference rank 0, root index 2 */
      remote[2].rank  = 0;
      remote[2].index = 2 * stride;
    }
  }

  /* Create a star forest for communication. When stride == 1 the leaf space is dense, so mine is NULL; with -stride > 1 the strided leaf indices in mine are passed instead. */
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetFromOptions(sf));
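  /* PETSC_OWN_POINTER transfers ownership of mine and remote to the SF, which frees them when it is destroyed; the caller must not free or reuse these arrays */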
  PetscCall(PetscSFSetGraph(sf, nrootsalloc, nleaves, mine, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sf));

  /* View graph, mostly useful for debugging purposes. */
  PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL));
  PetscCall(PetscSFView(sf, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));

  if (test_bcast) { /* broadcast rootdata into leafdata */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Initialize local buffer; these values are never used. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_bcast && test_char) { /* Bcast with char */
    PetscInt len;
    char     buf[256];
    char    *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = '*';
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 'A' + rank * 3 + i; /* rank is very small, so it is fine to compute a char */
    /* Initialize local buffer; these values are never used. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = '?';

    PetscCall(PetscSFBcastBegin(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata in type of char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5c", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata in type of char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5c", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_bcastop) { /* Reduce rootdata into leafdata */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce with */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -10 * (rank + 1) - i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-BcastAndOp Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, mop));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce) { /* Reduce leafdata into rootdata */
    PetscInt *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1000 * (rank + 1) + 10 * i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    /* Perform reduction. Computation or other communication can be performed between the begin and end calls.
     * This example sums the values, but other MPI_Ops can be used (e.g. MPI_MAX, MPI_PROD). */
    PetscCall(PetscSFReduceBegin(sf, MPIU_INT, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPIU_INT, leafdata, rootdata, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata\n"));
    PetscCall(PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce && test_char) { /* Reduce with signed char */
    PetscInt     len;
    char         buf[256];
    signed char *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 10 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 50 * (rank + 1) + 10 * i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of signed char\n"));

    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    /* The MPI standard does not support reductions on MPI_CHAR, so MPI_SIGNED_CHAR is used here; passing MPI_CHAR would trigger an error.
       Testing with -test_op max, one can see that the sign does take effect in MPI_MAX.
     */
    PetscCall(PetscSFReduceBegin(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of signed char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of signed char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_reduce && test_char) { /* Reduce with unsigned char */
    PetscInt       len;
    char           buf[256];
    unsigned char *rootdata, *leafdata;
    PetscCall(PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata));
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = 0;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 10 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = 0;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 50 * (rank + 1) + 10 * i;
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of unsigned char\n"));

    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    /* The MPI standard does not support reductions on MPI_CHAR, so MPI_UNSIGNED_CHAR is used here; passing MPI_CHAR would trigger an error.
       With -test_op max, the values are compared as unsigned.
     */
    PetscCall(PetscSFReduceBegin(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop));
    PetscCall(PetscSFReduceEnd(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of unsigned char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", leafdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of unsigned char\n"));
    len = 0;
    PetscCall(PetscSNPrintf(buf, 256, "%4d:", rank));
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscCall(PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]));
      len += 5;
    }
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

    PetscCall(PetscFree2(rootdata, leafdata));
  }

  if (test_degree) {
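    /* degree[i] will hold the number of leaves, across all processes, that reference root i on this process */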
    const PetscInt *degree;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Root degrees\n"));
    PetscCall(PetscIntView(nrootsalloc, degree, PETSC_VIEWER_STDOUT_WORLD));
  }

  if (test_fetchandop) {
    /* Cannot use text compare here because token ordering is not deterministic */
    PetscInt *leafdata, *leafupdate, *rootdata;
    PetscCall(PetscMalloc3(nleavesalloc, &leafdata, nleavesalloc, &leafupdate, nrootsalloc, &rootdata));
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1;
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 0;
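    /* Each leaf atomically combines its leafdata into the root it references using mop; leafupdate receives the value that root held immediately before this leaf's own update */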
    PetscCall(PetscSFFetchAndOpBegin(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop));
    PetscCall(PetscSFFetchAndOpEnd(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Rootdata (sum of 1 from each leaf)\n"));
    PetscCall(PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Leafupdate (value at roots prior to my atomic update)\n"));
    PetscCall(PetscIntView(nleavesalloc, leafupdate, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree3(leafdata, leafupdate, rootdata));
  }

  if (test_gather) {
    const PetscInt *degree;
    PetscInt        inedges, *indata, *outdata;
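    /* The multi-root space has one slot per incoming edge (the sum of the root degrees); a gather places every leaf value in its own slot instead of combining them */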
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
    PetscCall(PetscMalloc2(inedges, &indata, nleavesalloc, &outdata));
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0; i < nleaves; i++) outdata[i * stride] = 1000 * (rank + 1) + i;
    PetscCall(PetscSFGatherBegin(sf, MPIU_INT, outdata, indata));
    PetscCall(PetscSFGatherEnd(sf, MPIU_INT, outdata, indata));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Gathered data at multi-roots from leaves\n"));
    PetscCall(PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(indata, outdata));
  }

  if (test_scatter) {
    const PetscInt *degree;
    PetscInt        j, count, inedges, *indata, *outdata;
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
    PetscCall(PetscMalloc2(inedges, &indata, nleavesalloc, &outdata));
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0, count = 0; i < nrootsalloc; i++) {
      for (j = 0; j < degree[i]; j++) indata[count++] = 1000 * (rank + 1) + 100 * i + j;
    }
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Data at multi-roots, to scatter to leaves\n"));
    PetscCall(PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(PetscSFScatterBegin(sf, MPIU_INT, indata, outdata));
    PetscCall(PetscSFScatterEnd(sf, MPIU_INT, indata, outdata));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Scattered data at leaves\n"));
    PetscCall(PetscIntView(nleavesalloc, outdata, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscFree2(indata, outdata));
  }

  if (test_embed) {
    const PetscInt nroots = 1 + (PetscInt)(rank == 0);
    PetscInt       selected[2];
    PetscSF        esf;

    selected[0] = stride;
    selected[1] = 2 * stride;
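    /* The embedded SF keeps only edges whose roots are listed in selected: root 1*stride on every rank, plus root 2*stride on rank 0 */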
    PetscCall(PetscSFCreateEmbeddedRootSF(sf, nroots, selected, &esf));
    PetscCall(PetscSFSetUp(esf));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Embedded PetscSF\n"));
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL));
    PetscCall(PetscSFView(esf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSFDestroy(&esf));
  }

  if (test_invert) {
    const PetscInt *degree;
    PetscInt       *mRootsOrigNumbering;
    PetscInt        inedges;
    PetscSF         msf, imsf;

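    /* The multi-SF splits each root into one root per incoming leaf edge, so every leaf maps to a distinct root; the inverse SF then swaps the roles of roots and leaves */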
    PetscCall(PetscSFGetMultiSF(sf, &msf));
    PetscCall(PetscSFCreateInverseSF(msf, &imsf));
    PetscCall(PetscSFSetUp(msf));
    PetscCall(PetscSFSetUp(imsf));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF\n"));
    PetscCall(PetscSFView(msf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF roots indices in original SF roots numbering\n"));
    PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
    PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
    PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, degree, &inedges, &mRootsOrigNumbering));
    PetscCall(PetscIntView(inedges, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF\n"));
    PetscCall(PetscSFView(imsf, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF, original numbering\n"));
    PetscCall(PetscSFViewCustomLocals_Private(imsf, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSFDestroy(&imsf));
    PetscCall(PetscFree(mRootsOrigNumbering));
  }

  /* Clean up storage for the star forest. */
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_reduce -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2_basic
      nsize: 4
      args: -test_reduce -sf_type basic

   test:
      suffix: 3
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_degree -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 3_basic
      nsize: 4
      args: -test_degree -sf_type basic

   test:
      suffix: 4
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_gather -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 4_basic
      nsize: 4
      args: -test_gather -sf_type basic

   test:
      suffix: 4_stride
      nsize: 4
      args: -test_gather -sf_type basic -stride 2

   test:
      suffix: 5
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_scatter -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 5_basic
      nsize: 4
      args: -test_scatter -sf_type basic

   test:
      suffix: 5_stride
      nsize: 4
      args: -test_scatter -sf_type basic -stride 2

   test:
      suffix: 6
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      # No -sf_window_flavor dynamic due to bug https://gitlab.com/petsc/petsc/issues/555
      args: -test_embed -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 6_basic
      nsize: 4
      args: -test_embed -sf_type basic

   test:
      suffix: 7
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_invert -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 7_basic
      nsize: 4
      args: -test_invert -sf_type basic

   test:
      suffix: basic
      nsize: 4
      args: -test_bcast -sf_type basic
      output_file: output/ex1_1_basic.out

   test:
      suffix: bcastop_basic
      nsize: 4
      args: -test_bcastop -sf_type basic
      output_file: output/ex1_bcastop_basic.out

   test:
      suffix: 8
      nsize: 3
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -test_sf_distribute -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 8_basic
      nsize: 3
      args: -test_bcast -test_sf_distribute -sf_type basic

   test:
      suffix: 9_char
      nsize: 4
      args: -sf_type basic -test_bcast -test_reduce -test_op max -test_char

   # Here we do not test -sf_window_flavor dynamic since it is designed for repeated SFs with few different rootdata pointers
   test:
      suffix: 10
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}} -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
   test:
      suffix: 10_shared
      output_file: output/ex1_10.out
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 10_basic
      nsize: 4
      args: -sf_type basic -test_all -test_bcastop 0 -test_fetchandop 0

TEST*/