xref: /petsc/src/vec/is/sf/tests/ex5.c (revision d5b43468fb8780a8feea140ccd6fa3e6a50411cc)
/* One-line usage summary printed by -help; must name the routine actually under
   test: PetscSFCompose (the original text had a typo, "PetscSFFCompose"). */
static char help[] = "Test PetscSFCompose when the ilocal arrays are not identity nor dense\n\n";
2 
3 #include <petsc.h>
4 #include <petscsf.h>
5 
/*
  Exercises PetscSFCompose / PetscSFComposeInverse / PetscSFCreateInverseSF in the
  case where the leaf index arrays (ilocal) are neither NULL (identity) nor a dense
  contiguous range: sfA gathers sparsely onto rank 0, sfB scatters sparsely from
  rank 0 with reversed leaf ordering, and the test broadcasts integer data through
  the individual SFs and through their compositions so the outputs can be compared
  by the test harness (-view output files).

  Options:
    -nl <n>              : base chunk size used to build all sizes (default 2)
    -explicit_inverse <b>: if true, compose with an explicitly built inverse SF
                           (PetscSFCreateInverseSF) instead of PetscSFComposeInverse
*/
int main(int argc, char **argv)
{
  PetscSF      sfA, sfB, sfBA, sfAAm, sfBBm, sfAm, sfBm;
  PetscInt     nrootsA, nleavesA, nrootsB, nleavesB;
  PetscInt    *ilocalA, *ilocalB;
  PetscSFNode *iremoteA, *iremoteB;
  PetscMPIInt  rank, size;
  PetscInt     i, m, n, k, nl = 2, mA, mB, nldataA, nldataB;
  PetscInt    *rdA, *rdB, *ldA, *ldB;
  PetscBool    inverse = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nl", &nl, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit_inverse", &inverse, NULL));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfA));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfB));
  PetscCall(PetscSFSetFromOptions(sfA));
  PetscCall(PetscSFSetFromOptions(sfB));

  /* Sizes derived from nl:
       n = total leaf-data span on rank 0 (4*nl per process),
       m = roots per process for sfA (2*nl),
       k = leaves per process for sfB (nl). */
  n = 4 * nl * size;
  m = 2 * nl;
  k = nl;

  /* Leaf data array sizes: rank 0 holds the full gathered span for A,
     every rank holds 3*nl entries for B (leaves land in its middle third). */
  nldataA = rank == 0 ? n : 0;
  nldataB = 3 * nl;

  nrootsA  = m;
  nleavesA = rank == 0 ? size * m : 0; /* only rank 0 has leaves of A (gather) */
  nrootsB  = rank == 0 ? n : 0;        /* only rank 0 has roots of B (scatter) */
  nleavesB = k;

  PetscCall(PetscMalloc1(nleavesA, &ilocalA));
  PetscCall(PetscMalloc1(nleavesA, &iremoteA));
  PetscCall(PetscMalloc1(nleavesB, &ilocalB));
  PetscCall(PetscMalloc1(nleavesB, &iremoteB));

  /* sf A bcast is equivalent to a sparse gather on process 0
     process 0 receives data in the middle [nl,3*nl] of the leaf data array for A */
  for (i = 0; i < nleavesA; i++) {
    iremoteA[i].rank  = i / m; /* m consecutive leaves per source rank */
    iremoteA[i].index = i % m;
    ilocalA[i]        = nl + i / m * 4 * nl + i % m; /* offset nl into each rank's 4*nl slot */
  }

  /* sf B bcast is equivalent to a sparse scatter from process 0
     process 0 sends data from [nl,2*nl] of the leaf data array for A
     each process receives, in reverse order, in the middle [nl,2*nl] of the leaf data array for B */
  for (i = 0; i < nleavesB; i++) {
    iremoteB[i].rank  = 0;
    iremoteB[i].index = rank * 4 * nl + nl + i % m; /* i < nl < m, so i % m == i here */
    ilocalB[i]        = 2 * nl - i - 1;             /* reversed placement -> non-monotone ilocal */
  }
  /* Ownership of ilocal/iremote arrays transfers to the SFs (PETSC_OWN_POINTER):
     no explicit frees of these arrays below. */
  PetscCall(PetscSFSetGraph(sfA, nrootsA, nleavesA, ilocalA, PETSC_OWN_POINTER, iremoteA, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetGraph(sfB, nrootsB, nleavesB, ilocalB, PETSC_OWN_POINTER, iremoteB, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sfA));
  PetscCall(PetscSFSetUp(sfB));
  PetscCall(PetscObjectSetName((PetscObject)sfA, "sfA"));
  PetscCall(PetscObjectSetName((PetscObject)sfB, "sfB"));
  PetscCall(PetscSFViewFromOptions(sfA, NULL, "-view"));
  PetscCall(PetscSFViewFromOptions(sfB, NULL, "-view"));

  /* NOTE(review): mA and mB are queried but never read afterwards — presumably
     kept to exercise PetscSFGetLeafRange; confirm before removing. */
  PetscCall(PetscSFGetLeafRange(sfA, NULL, &mA));
  PetscCall(PetscSFGetLeafRange(sfB, NULL, &mB));
  PetscCall(PetscMalloc2(nrootsA, &rdA, nldataA, &ldA));
  PetscCall(PetscMalloc2(nrootsB, &rdB, nldataB, &ldB));
  /* Root data on A is globally numbered; leaf buffers start poisoned with -1 so
     untouched (sparse) slots are visible in the -view output. */
  for (i = 0; i < nrootsA; i++) rdA[i] = m * rank + i;
  for (i = 0; i < nldataA; i++) ldA[i] = -1;
  for (i = 0; i < nldataB; i++) ldB[i] = -1;

  /* Reference result: bcast through A then through B, printing each stage. */
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BcastB(BcastA)\n"));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "A: root data\n"));
  PetscCall(PetscIntView(nrootsA, rdA, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscSFBcastBegin(sfA, MPIU_INT, rdA, ldA, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfA, MPIU_INT, rdA, ldA, MPI_REPLACE));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "A: leaf data (all)\n"));
  PetscCall(PetscIntView(nldataA, ldA, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscSFBcastBegin(sfB, MPIU_INT, ldA, ldB, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfB, MPIU_INT, ldA, ldB, MPI_REPLACE));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "B: leaf data (all)\n"));
  PetscCall(PetscIntView(nldataB, ldB, PETSC_VIEWER_STDOUT_WORLD));

  /* Composition under test: sfBA should reproduce BcastB(BcastA) in one bcast. */
  PetscCall(PetscSFCompose(sfA, sfB, &sfBA));
  PetscCall(PetscSFSetFromOptions(sfBA));
  PetscCall(PetscSFSetUp(sfBA));
  PetscCall(PetscObjectSetName((PetscObject)sfBA, "sfBA"));
  PetscCall(PetscSFViewFromOptions(sfBA, NULL, "-view"));

  /* Re-poison ldB so stale values from the two-stage bcast cannot mask errors. */
  for (i = 0; i < nldataB; i++) ldB[i] = -1;
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BcastBA\n"));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BA: root data\n"));
  PetscCall(PetscIntView(nrootsA, rdA, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscSFBcastBegin(sfBA, MPIU_INT, rdA, ldB, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfBA, MPIU_INT, rdA, ldB, MPI_REPLACE));
  PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "BA: leaf data (all)\n"));
  PetscCall(PetscIntView(nldataB, ldB, PETSC_VIEWER_STDOUT_WORLD));

  /* Explicit inverse of A, used below when -explicit_inverse is true. */
  PetscCall(PetscSFCreateInverseSF(sfA, &sfAm));
  PetscCall(PetscSFSetFromOptions(sfAm));
  PetscCall(PetscObjectSetName((PetscObject)sfAm, "sfAm"));
  PetscCall(PetscSFViewFromOptions(sfAm, NULL, "-view"));

  /* sfAAm = A composed with A^{-1}: either via PetscSFComposeInverse directly,
     or via PetscSFCompose with the explicitly inverted SF — results should agree. */
  if (!inverse) {
    PetscCall(PetscSFComposeInverse(sfA, sfA, &sfAAm));
  } else {
    PetscCall(PetscSFCompose(sfA, sfAm, &sfAAm));
  }
  PetscCall(PetscSFSetFromOptions(sfAAm));
  PetscCall(PetscSFSetUp(sfAAm));
  PetscCall(PetscObjectSetName((PetscObject)sfAAm, "sfAAm"));
  PetscCall(PetscSFViewFromOptions(sfAAm, NULL, "-view"));

  /* Same check for B and its inverse. */
  PetscCall(PetscSFCreateInverseSF(sfB, &sfBm));
  PetscCall(PetscSFSetFromOptions(sfBm));
  PetscCall(PetscObjectSetName((PetscObject)sfBm, "sfBm"));
  PetscCall(PetscSFViewFromOptions(sfBm, NULL, "-view"));

  if (!inverse) {
    PetscCall(PetscSFComposeInverse(sfB, sfB, &sfBBm));
  } else {
    PetscCall(PetscSFCompose(sfB, sfBm, &sfBBm));
  }
  PetscCall(PetscSFSetFromOptions(sfBBm));
  PetscCall(PetscSFSetUp(sfBBm));
  PetscCall(PetscObjectSetName((PetscObject)sfBBm, "sfBBm"));
  PetscCall(PetscSFViewFromOptions(sfBBm, NULL, "-view"));

  PetscCall(PetscFree2(rdA, ldA));
  PetscCall(PetscFree2(rdB, ldB));

  PetscCall(PetscSFDestroy(&sfA));
  PetscCall(PetscSFDestroy(&sfB));
  PetscCall(PetscSFDestroy(&sfBA));
  PetscCall(PetscSFDestroy(&sfAm));
  PetscCall(PetscSFDestroy(&sfBm));
  PetscCall(PetscSFDestroy(&sfAAm));
  PetscCall(PetscSFDestroy(&sfBBm));

  PetscCall(PetscFinalize());
  return 0;
}
150 
151 /*TEST
152 
153    test:
154      suffix: 1
155      args: -view -explicit_inverse {{0 1}}
156 
157    test:
158      nsize: 7
159      filter: grep -v "type" | grep -v "sort"
160      suffix: 2
161      args: -view -nl 5 -explicit_inverse {{0 1}}
162 
163    # we cannot test for -sf_window_flavor dynamic because SFCompose with sparse leaves may change the root data pointer only locally, and this is not supported by the dynamic case
164    test:
165      nsize: 7
166      suffix: 2_window
167      filter: grep -v "type" | grep -v "sort"
168      output_file: output/ex5_2.out
169      args: -view -nl 5 -explicit_inverse {{0 1}} -sf_type window -sf_window_sync {{fence lock active}} -sf_window_flavor {{create allocate}}
170      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)
171 
172    # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
173    test:
174      nsize: 7
175      suffix: 2_window_shared
176      filter: grep -v "type" | grep -v "sort"
177      output_file: output/ex5_2.out
178      args: -view -nl 5 -explicit_inverse {{0 1}} -sf_type window -sf_window_sync {{fence lock active}} -sf_window_flavor shared
179      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED)
180 
181 TEST*/
182