xref: /petsc/src/vec/is/sf/tests/ex5.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
/* One-line usage summary printed by -help; the function under test is PetscSFCompose (typo "PetscSFFCompose" fixed) */
static char help[] = "Test PetscSFCompose when the ilocal arrays are not identity nor dense\n\n";
2 
3 #include <petsc.h>
4 #include <petscsf.h>
5 
/*
  Exercise PetscSFCompose / PetscSFComposeInverse when the ilocal (leaf
  indexing) arrays are sparse and non-identity:

    sfA: every rank contributes m roots, rank 0 owns all leaves — a bcast
         over sfA acts as a sparse gather onto rank 0;
    sfB: rank 0 owns all roots, every rank owns k leaves — a bcast over
         sfB acts as a sparse scatter from rank 0.

  Integer data is broadcast through sfA then sfB, and the same data is sent
  through the composed SF (sfBA) for comparison in the captured output.
  SF o SF^-1 products are also built, either via PetscSFComposeInverse or
  via an explicitly constructed inverse SF (-explicit_inverse 1).
*/
int main(int argc, char **argv) {
  PetscSF      sfA, sfB, sfBA, sfAAm, sfBBm, sfAm, sfBm;
  PetscInt     nrootsA, nleavesA, nrootsB, nleavesB;
  PetscInt    *ilocalA, *ilocalB;
  PetscSFNode *iremoteA, *iremoteB;
  PetscMPIInt  rank, size;
  PetscInt     i, m, n, k, nl = 2, mA, mB, nldataA, nldataB;
  PetscInt    *rdA, *rdB, *ldA, *ldB;
  PetscBool    inverse = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nl", &nl, NULL));                     /* block size from which all index ranges below are derived */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit_inverse", &inverse, NULL)); /* compose with an explicit inverse SF instead of PetscSFComposeInverse */
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfA));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfB));
  PetscCall(PetscSFSetFromOptions(sfA));
  PetscCall(PetscSFSetFromOptions(sfB));

  /* n: length of rank 0's leaf-data array for A (4*nl slots per rank);
     m: roots per rank for A; k: leaves per rank for B */
  n = 4 * nl * size;
  m = 2 * nl;
  k = nl;

  nldataA = rank == 0 ? n : 0; /* leaf data for A lives on rank 0 only */
  nldataB = 3 * nl;            /* leaf data for B exists on every rank */

  nrootsA  = m;                        /* each rank owns m roots of A */
  nleavesA = rank == 0 ? size * m : 0; /* all leaves of A concentrated on rank 0 (gather) */
  nrootsB  = rank == 0 ? n : 0;        /* all roots of B concentrated on rank 0 (scatter) */
  nleavesB = k;

  PetscCall(PetscMalloc1(nleavesA, &ilocalA));
  PetscCall(PetscMalloc1(nleavesA, &iremoteA));
  PetscCall(PetscMalloc1(nleavesB, &ilocalB));
  PetscCall(PetscMalloc1(nleavesB, &iremoteB));

  /* sf A bcast is equivalent to a sparse gather on process 0
     process 0 receives data in the middle [nl,3*nl] of the leaf data array for A */
  for (i = 0; i < nleavesA; i++) {
    iremoteA[i].rank  = i / m;                       /* leaf i maps to root i%m on rank i/m */
    iremoteA[i].index = i % m;
    ilocalA[i]        = nl + i / m * 4 * nl + i % m; /* sparse, non-identity leaf placement: m used slots per 4*nl-slot stripe */
  }

  /* sf B bcast is equivalent to a sparse scatter from process 0
     process 0 sends data from [nl,2*nl] of the leaf data array for A
     each process receives, in reverse order, in the middle [nl,2*nl] of the leaf data array for B */
  for (i = 0; i < nleavesB; i++) {
    iremoteB[i].rank  = 0;
    iremoteB[i].index = rank * 4 * nl + nl + i % m; /* i % m == i here (i < k <= m); written this way for symmetry with A */
    ilocalB[i]        = 2 * nl - i - 1;             /* reversed order within [nl, 2*nl) */
  }
  PetscCall(PetscSFSetGraph(sfA, nrootsA, nleavesA, ilocalA, PETSC_OWN_POINTER, iremoteA, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetGraph(sfB, nrootsB, nleavesB, ilocalB, PETSC_OWN_POINTER, iremoteB, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sfA));
  PetscCall(PetscSFSetUp(sfB));
  PetscCall(PetscObjectSetName((PetscObject)sfA, "sfA"));
  PetscCall(PetscObjectSetName((PetscObject)sfB, "sfB"));
  PetscCall(PetscSFViewFromOptions(sfA, NULL, "-view"));
  PetscCall(PetscSFViewFromOptions(sfB, NULL, "-view"));

  /* NOTE(review): mA/mB are not read after this point; the calls appear to be
     here to exercise PetscSFGetLeafRange on sparse ilocal arrays — confirm */
  PetscCall(PetscSFGetLeafRange(sfA, NULL, &mA));
  PetscCall(PetscSFGetLeafRange(sfB, NULL, &mB));
  PetscCall(PetscMalloc2(nrootsA, &rdA, nldataA, &ldA));
  PetscCall(PetscMalloc2(nrootsB, &rdB, nldataB, &ldB));
  for (i = 0; i < nrootsA; i++) rdA[i] = m * rank + i; /* globally unique root values so results are recognizable */
  for (i = 0; i < nldataA; i++) ldA[i] = -1;           /* -1 marks leaf slots the bcast must not touch */
  for (i = 0; i < nldataB; i++) ldB[i] = -1;

  /* reference result: two successive bcasts — roots of A -> leaves of A,
     then A's leaf array reused as B's root data -> leaves of B */
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BcastB(BcastA)\n"));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "A: root data\n"));
  PetscCall(PetscIntView(nrootsA, rdA, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscSFBcastBegin(sfA, MPIU_INT, rdA, ldA, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfA, MPIU_INT, rdA, ldA, MPI_REPLACE));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "A: leaf data (all)\n"));
  PetscCall(PetscIntView(nldataA, ldA, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscSFBcastBegin(sfB, MPIU_INT, ldA, ldB, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfB, MPIU_INT, ldA, ldB, MPI_REPLACE));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "B: leaf data (all)\n"));
  PetscCall(PetscIntView(nldataB, ldB, PETSC_VIEWER_STDOUT_WORLD));

  /* sfBA = sfB o sfA: a single bcast over it must reproduce the two-step result above */
  PetscCall(PetscSFCompose(sfA, sfB, &sfBA));
  PetscCall(PetscSFSetFromOptions(sfBA));
  PetscCall(PetscSFSetUp(sfBA));
  PetscCall(PetscObjectSetName((PetscObject)sfBA, "sfBA"));
  PetscCall(PetscSFViewFromOptions(sfBA, NULL, "-view"));

  for (i = 0; i < nldataB; i++) ldB[i] = -1; /* reset sentinel before the composed bcast */
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BcastBA\n"));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BA: root data\n"));
  PetscCall(PetscIntView(nrootsA, rdA, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscSFBcastBegin(sfBA, MPIU_INT, rdA, ldB, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfBA, MPIU_INT, rdA, ldB, MPI_REPLACE));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BA: leaf data (all)\n"));
  PetscCall(PetscIntView(nldataB, ldB, PETSC_VIEWER_STDOUT_WORLD));

  /* sfAAm = sfA o sfA^-1, built either in one shot (PetscSFComposeInverse)
     or by composing with an explicitly created inverse SF */
  PetscCall(PetscSFCreateInverseSF(sfA, &sfAm));
  PetscCall(PetscSFSetFromOptions(sfAm));
  PetscCall(PetscObjectSetName((PetscObject)sfAm, "sfAm"));
  PetscCall(PetscSFViewFromOptions(sfAm, NULL, "-view"));

  if (!inverse) {
    PetscCall(PetscSFComposeInverse(sfA, sfA, &sfAAm));
  } else {
    PetscCall(PetscSFCompose(sfA, sfAm, &sfAAm));
  }
  PetscCall(PetscSFSetFromOptions(sfAAm));
  PetscCall(PetscSFSetUp(sfAAm));
  PetscCall(PetscObjectSetName((PetscObject)sfAAm, "sfAAm"));
  PetscCall(PetscSFViewFromOptions(sfAAm, NULL, "-view"));

  /* same construction for sfBBm = sfB o sfB^-1 */
  PetscCall(PetscSFCreateInverseSF(sfB, &sfBm));
  PetscCall(PetscSFSetFromOptions(sfBm));
  PetscCall(PetscObjectSetName((PetscObject)sfBm, "sfBm"));
  PetscCall(PetscSFViewFromOptions(sfBm, NULL, "-view"));

  if (!inverse) {
    PetscCall(PetscSFComposeInverse(sfB, sfB, &sfBBm));
  } else {
    PetscCall(PetscSFCompose(sfB, sfBm, &sfBBm));
  }
  PetscCall(PetscSFSetFromOptions(sfBBm));
  PetscCall(PetscSFSetUp(sfBBm));
  PetscCall(PetscObjectSetName((PetscObject)sfBBm, "sfBBm"));
  PetscCall(PetscSFViewFromOptions(sfBBm, NULL, "-view"));

  /* ilocal/iremote arrays were handed to the SFs with PETSC_OWN_POINTER,
     so only the data arrays and the SFs themselves are freed here */
  PetscCall(PetscFree2(rdA, ldA));
  PetscCall(PetscFree2(rdB, ldB));

  PetscCall(PetscSFDestroy(&sfA));
  PetscCall(PetscSFDestroy(&sfB));
  PetscCall(PetscSFDestroy(&sfBA));
  PetscCall(PetscSFDestroy(&sfAm));
  PetscCall(PetscSFDestroy(&sfBm));
  PetscCall(PetscSFDestroy(&sfAAm));
  PetscCall(PetscSFDestroy(&sfBBm));

  PetscCall(PetscFinalize());
  return 0;
}
149 
150 /*TEST
151 
152    test:
153      suffix: 1
154      args: -view -explicit_inverse {{0 1}}
155 
156    test:
157      nsize: 7
158      filter: grep -v "type" | grep -v "sort"
159      suffix: 2
160      args: -view -nl 5 -explicit_inverse {{0 1}}
161 
162    # we cannot test for -sf_window_flavor dynamic because SFCompose with sparse leaves may change the root data pointer only locally, and this is not supported by the dynamic case
163    test:
164      nsize: 7
165      suffix: 2_window
166      filter: grep -v "type" | grep -v "sort"
167      output_file: output/ex5_2.out
168      args: -view -nl 5 -explicit_inverse {{0 1}} -sf_type window -sf_window_sync {{fence lock active}} -sf_window_flavor {{create allocate}}
169      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)
170 
171    # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
172    test:
173      nsize: 7
174      suffix: 2_window_shared
175      filter: grep -v "type" | grep -v "sort"
176      output_file: output/ex5_2.out
177      args: -view -nl 5 -explicit_inverse {{0 1}} -sf_type window -sf_window_sync {{fence lock active}} -sf_window_flavor shared
178      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED)
179 
180 TEST*/
181