xref: /petsc/src/vec/is/sf/tests/ex4.c (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
/* One-line usage string shown by -help; the routine under test is PetscSFCompose
   (the previous text misspelled it "PetscSFFCompose"). */
static char help[] = "Test PetscSFCompose when the ilocal array is not the identity\n\n";
2 
3 #include <petscsf.h>
4 
/*
  Serial test of PetscSFCompose with non-identity ilocal arrays.

  Builds two star forests sfA and sfB, broadcasts a vector through each in
  sequence (a -> b over sfA, then b -> ba over sfB), then broadcasts directly
  a -> ba over the composition sfBA = sfB o sfA.  The two printed `ba` vectors
  should agree wherever sfB has a leaf.

  Two graph shapes, selected by -sparse_sfB:
    default:      sfA reverses indices (ilocalA[i] = n-1-i), sfB is the identity;
    -sparse_sfB:  sfA cyclically permutes indices, sfB has only 2 leaves at
                  indices {1,2}, so index 0 of `ba` is never written by a bcast.
*/
int main(int argc, char **argv) {
  PetscSF            sfA, sfB, sfBA;            /* the two input SFs and their composition */
  PetscInt           nrootsA, nleavesA, nrootsB, nleavesB;
  PetscInt          *ilocalA, *ilocalB;         /* leaf index arrays; ownership passes to the SFs below */
  PetscSFNode       *iremoteA, *iremoteB;       /* (rank, index) root locations; ownership passes to the SFs */
  Vec                a, b, ba;                  /* root vec of sfA, leaf vec of sfA / root vec of sfB, leaf vec of sfB */
  const PetscScalar *arrayR;
  PetscScalar       *arrayW;
  PetscMPIInt        size;
  PetscInt           i;
  PetscInt           maxleafB;                  /* largest leaf index of sfB; sizes `ba` */
  PetscBool          flag = PETSC_FALSE;        /* set by -sparse_sfB */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  /* All roots and leaves live on rank 0; the test is only meaningful serially. */
  PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_USER, "Only coded for one MPI process");

  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfA));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfB));
  PetscCall(PetscSFSetFromOptions(sfA));
  PetscCall(PetscSFSetFromOptions(sfB));

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-sparse_sfB", &flag, NULL));

  if (flag) {
    /* sfA permutes indices, sfB has sparse leaf space. */
    nrootsA  = 3;
    nleavesA = 3;
    nrootsB  = 3;
    nleavesB = 2;
  } else {
    /* sfA reverses indices, sfB is identity */
    nrootsA = nrootsB = nleavesA = nleavesB = 4;
  }
  PetscCall(PetscMalloc1(nleavesA, &ilocalA));
  PetscCall(PetscMalloc1(nleavesA, &iremoteA));
  PetscCall(PetscMalloc1(nleavesB, &ilocalB));
  PetscCall(PetscMalloc1(nleavesB, &iremoteB));

  /* sfA: every leaf i points at root i on rank 0, but lands at a permuted
     local position (cyclic shift with -sparse_sfB, reversal otherwise). */
  for (i = 0; i < nleavesA; i++) {
    iremoteA[i].rank  = 0;
    iremoteA[i].index = i;
    if (flag) {
      ilocalA[i] = (i + 1) % nleavesA;
    } else {
      ilocalA[i] = nleavesA - i - 1;
    }
  }

  /* sfB: with -sparse_sfB the two leaves sit at local indices {2,1} pulling
     roots {2,1} -- leaf index 0 is deliberately absent; otherwise identity. */
  for (i = 0; i < nleavesB; i++) {
    iremoteB[i].rank = 0;
    if (flag) {
      ilocalB[i]        = nleavesB - i;
      iremoteB[i].index = nleavesB - i - 1;
    } else {
      ilocalB[i]        = i;
      iremoteB[i].index = i;
    }
  }

  /* PETSC_OWN_POINTER: the SFs take ownership of the four arrays above, so
     they must not be freed (or reused) here. */
  PetscCall(PetscSFSetGraph(sfA, nrootsA, nleavesA, ilocalA, PETSC_OWN_POINTER, iremoteA, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetGraph(sfB, nrootsB, nleavesB, ilocalB, PETSC_OWN_POINTER, iremoteB, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sfA));
  PetscCall(PetscSFSetUp(sfB));
  PetscCall(PetscObjectSetName((PetscObject)sfA, "sfA"));
  PetscCall(PetscObjectSetName((PetscObject)sfB, "sfB"));

  PetscCall(VecCreateSeq(PETSC_COMM_WORLD, nrootsA, &a));
  PetscCall(VecCreateSeq(PETSC_COMM_WORLD, nleavesA, &b));
  /* Size ba to cover the full (possibly sparse) leaf index space of sfB. */
  PetscCall(PetscSFGetLeafRange(sfB, NULL, &maxleafB));
  PetscCall(VecCreateSeq(PETSC_COMM_WORLD, maxleafB + 1, &ba));
  /* Fill the root vector with 0,1,2,... so permutations are visible in output. */
  PetscCall(VecGetArray(a, &arrayW));
  for (i = 0; i < nrootsA; i++) arrayW[i] = (PetscScalar)i;
  PetscCall(VecRestoreArray(a, &arrayW));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Initial Vec A\n"));
  PetscCall(VecView(a, NULL));
  PetscCall(VecGetArrayRead(a, &arrayR));
  PetscCall(VecGetArray(b, &arrayW));

  /* Step 1: roots of sfA -> leaves of sfA (a -> b). */
  PetscCall(PetscSFBcastBegin(sfA, MPIU_SCALAR, arrayR, arrayW, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfA, MPIU_SCALAR, arrayR, arrayW, MPI_REPLACE));
  PetscCall(VecRestoreArray(b, &arrayW));
  PetscCall(VecRestoreArrayRead(a, &arrayR));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBroadcast A->B over sfA\n"));
  PetscCall(VecView(b, NULL));

  /* Step 2: b (now the root space of sfB) -> ba over sfB. */
  PetscCall(VecGetArrayRead(b, &arrayR));
  PetscCall(VecGetArray(ba, &arrayW));
  arrayW[0] = 10.0; /* Sentinel: survives the bcast only with -sparse_sfB, where leaf 0 is absent (identity sfB overwrites it) */
  PetscCall(PetscSFBcastBegin(sfB, MPIU_SCALAR, arrayR, arrayW, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfB, MPIU_SCALAR, arrayR, arrayW, MPI_REPLACE));
  PetscCall(VecRestoreArrayRead(b, &arrayR));
  PetscCall(VecRestoreArray(ba, &arrayW));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBroadcast B->BA over sfB\n"));
  PetscCall(VecView(ba, NULL));

  /* Step 3: the same transfer in one shot over the composition sfBA = sfB o sfA;
     the resulting ba should match step 2 at every leaf of sfB. */
  PetscCall(PetscSFCompose(sfA, sfB, &sfBA));
  PetscCall(PetscSFSetFromOptions(sfBA));
  PetscCall(PetscObjectSetName((PetscObject)sfBA, "(sfB o sfA)"));
  PetscCall(VecGetArrayRead(a, &arrayR));
  PetscCall(VecGetArray(ba, &arrayW));
  arrayW[0] = 11.0; /* Sentinel: as above, only untouched by the bcast with -sparse_sfB */
  PetscCall(PetscSFBcastBegin(sfBA, MPIU_SCALAR, arrayR, arrayW, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfBA, MPIU_SCALAR, arrayR, arrayW, MPI_REPLACE));
  PetscCall(VecRestoreArray(ba, &arrayW));
  PetscCall(VecRestoreArrayRead(a, &arrayR));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBroadcast A->BA over sfBA (sfB o sfA)\n"));
  PetscCall(VecView(ba, NULL));

  PetscCall(VecDestroy(&ba));
  PetscCall(VecDestroy(&b));
  PetscCall(VecDestroy(&a));

  /* Dump the three graphs so the expected output pins the composed structure. */
  PetscCall(PetscSFView(sfA, NULL));
  PetscCall(PetscSFView(sfB, NULL));
  PetscCall(PetscSFView(sfBA, NULL));
  PetscCall(PetscSFDestroy(&sfA));
  PetscCall(PetscSFDestroy(&sfB));
  PetscCall(PetscSFDestroy(&sfBA));

  PetscCall(PetscFinalize());
  return 0;
}
131 
132 /*TEST
133 
134    test:
135      suffix: 1
136 
137    test:
138      suffix: 2
139      filter: grep -v "type" | grep -v "sort"
140      args: -sparse_sfB
141 
142    test:
143      suffix: 2_window
144      filter: grep -v "type" | grep -v "sort"
145      output_file: output/ex4_2.out
146      args: -sparse_sfB -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
147      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)
148 
149    # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
150    test:
151      suffix: 2_window_shared
152      filter: grep -v "type" | grep -v "sort"
153      output_file: output/ex4_2.out
154      args: -sparse_sfB -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
155      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED)
156 
157 TEST*/
158