xref: /petsc/src/vec/is/sf/tests/ex4.c (revision ffa8c5705e8ab2cf85ee1d14dbe507a6e2eb5283)
/* One-line usage/help string printed by -help; the routine under test is
   PetscSFCompose (the original text misspelled it "PetscSFFCompose"). */
static char help[]= "Test PetscSFCompose when the ilocal array is not the identity\n\n";
2 
3 #include <petscsf.h>
4 
/*
  Serial (1-rank) test of PetscSFCompose with non-identity ilocal arrays.

  It builds two star forests sfA (roots A -> leaves B) and sfB (roots B ->
  leaves BA), broadcasts a vector through each in turn, then composes them
  into sfBA = sfB o sfA and checks that a single broadcast over sfBA
  reproduces the two-step result.  Correctness is judged by comparing the
  viewer output against the stored expected-output files (see TEST block).
*/
int main(int argc, char **argv)
{
  PetscSF            sfA, sfB, sfBA;             /* the two input SFs and their composition */
  PetscInt           nrootsA, nleavesA, nrootsB, nleavesB;
  PetscInt          *ilocalA, *ilocalB;          /* leaf index arrays; deliberately NOT the identity */
  PetscSFNode       *iremoteA, *iremoteB;        /* (rank, index) root locations; rank is always 0 (serial) */
  Vec                a, b, ba;                   /* root vec of sfA, leaf vec of sfA / root vec of sfB, leaf vec of sfB */
  const PetscScalar *arrayR;
  PetscScalar       *arrayW;
  PetscMPIInt        size;
  PetscInt           i;
  PetscInt           maxleafB;                   /* largest leaf index of sfB; sizes the ba vector */
  PetscBool          flag = PETSC_FALSE;         /* set by -sparse_sfB: exercise a sparse leaf space */

  PetscCall(PetscInitialize(&argc,&argv,NULL,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCheck(size == 1,PETSC_COMM_WORLD, PETSC_ERR_USER, "Only coded for one MPI process");

  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfA));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sfB));
  PetscCall(PetscSFSetFromOptions(sfA));
  PetscCall(PetscSFSetFromOptions(sfB));

  PetscCall(PetscOptionsGetBool(NULL,NULL,"-sparse_sfB",&flag,NULL));

  if (flag) {
    /* sfA permutes indices, sfB has sparse leaf space. */
    nrootsA = 3;
    nleavesA = 3;
    nrootsB = 3;
    nleavesB = 2;   /* fewer leaves than roots: leaf index 0 of ba is never written */
  } else {
    /* sfA reverses indices, sfB is identity */
    nrootsA = nrootsB = nleavesA = nleavesB = 4;
  }
  /* Ownership of all four arrays is transferred to the SFs below via
     PETSC_OWN_POINTER, so there are no matching PetscFree calls here. */
  PetscCall(PetscMalloc1(nleavesA, &ilocalA));
  PetscCall(PetscMalloc1(nleavesA, &iremoteA));
  PetscCall(PetscMalloc1(nleavesB, &ilocalB));
  PetscCall(PetscMalloc1(nleavesB, &iremoteB));

  /* sfA: root i feeds leaf ilocalA[i] — a cyclic shift (sparse case) or a
     reversal (default case); either way ilocalA is not the identity. */
  for (i = 0; i < nleavesA; i++) {
    iremoteA[i].rank = 0;
    iremoteA[i].index = i;
    if (flag) {
      ilocalA[i] = (i + 1) % nleavesA;
    } else {
      ilocalA[i] = nleavesA - i - 1;
    }
  }

  /* sfB: in the sparse case leaves live at indices {nleavesB, ..., 1}
     (note: index 0 is unused, so the leaf space has a hole); in the
     default case sfB is the identity map. */
  for (i = 0; i < nleavesB; i++) {
    iremoteB[i].rank = 0;
    if (flag) {
      ilocalB[i] = nleavesB - i;
      iremoteB[i].index = nleavesB - i - 1;
    } else {
      ilocalB[i] = i;
      iremoteB[i].index = i;
    }
  }

  PetscCall(PetscSFSetGraph(sfA, nrootsA, nleavesA, ilocalA, PETSC_OWN_POINTER, iremoteA, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetGraph(sfB, nrootsB, nleavesB, ilocalB, PETSC_OWN_POINTER, iremoteB, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sfA));
  PetscCall(PetscSFSetUp(sfB));
  PetscCall(PetscObjectSetName((PetscObject)sfA, "sfA"));
  PetscCall(PetscObjectSetName((PetscObject)sfB, "sfB"));

  /* ba must span sfB's full leaf range (0..maxleafB), which can exceed
     nleavesB when the leaf space is sparse. */
  PetscCall(VecCreateSeq(PETSC_COMM_WORLD, nrootsA, &a));
  PetscCall(VecCreateSeq(PETSC_COMM_WORLD, nleavesA, &b));
  PetscCall(PetscSFGetLeafRange(sfB, NULL, &maxleafB));
  PetscCall(VecCreateSeq(PETSC_COMM_WORLD, maxleafB+1, &ba));
  /* Fill the root vector with recognizable values 0,1,2,... */
  PetscCall(VecGetArray(a, &arrayW));
  for (i = 0; i < nrootsA; i++) {
    arrayW[i] = (PetscScalar)i;
  }
  PetscCall(VecRestoreArray(a, &arrayW));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Initial Vec A\n"));
  PetscCall(VecView(a, NULL));
  PetscCall(VecGetArrayRead(a, &arrayR));
  PetscCall(VecGetArray(b, &arrayW));

  /* Step 1: broadcast roots of sfA into leaves (a -> b). */
  PetscCall(PetscSFBcastBegin(sfA, MPIU_SCALAR, arrayR, arrayW,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfA, MPIU_SCALAR, arrayR, arrayW,MPI_REPLACE));
  PetscCall(VecRestoreArray(b, &arrayW));
  PetscCall(VecRestoreArrayRead(a, &arrayR));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBroadcast A->B over sfA\n"));
  PetscCall(VecView(b, NULL));

  /* Step 2: broadcast through sfB (b -> ba). */
  PetscCall(VecGetArrayRead(b, &arrayR));
  PetscCall(VecGetArray(ba, &arrayW));
  arrayW[0] = 10.0;             /* sentinel; only survives with -sparse_sfB, where leaf index 0 is a hole (otherwise the bcast overwrites it) */
  PetscCall(PetscSFBcastBegin(sfB, MPIU_SCALAR, arrayR, arrayW,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfB, MPIU_SCALAR, arrayR, arrayW,MPI_REPLACE));
  PetscCall(VecRestoreArrayRead(b, &arrayR));
  PetscCall(VecRestoreArray(ba, &arrayW));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBroadcast B->BA over sfB\n"));
  PetscCall(VecView(ba, NULL));

  /* The composition sfBA maps roots of sfA directly to leaves of sfB; one
     bcast over it must match the two-step result viewed above. */
  PetscCall(PetscSFCompose(sfA, sfB, &sfBA));
  PetscCall(PetscSFSetFromOptions(sfBA));
  PetscCall(PetscObjectSetName((PetscObject)sfBA, "(sfB o sfA)"));
  PetscCall(VecGetArrayRead(a, &arrayR));
  PetscCall(VecGetArray(ba, &arrayW));
  arrayW[0] = 11.0;             /* fresh sentinel; only survives with -sparse_sfB (see note above) */
  PetscCall(PetscSFBcastBegin(sfBA, MPIU_SCALAR, arrayR, arrayW,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sfBA, MPIU_SCALAR, arrayR, arrayW,MPI_REPLACE));
  PetscCall(VecRestoreArray(ba, &arrayW));
  PetscCall(VecRestoreArrayRead(a, &arrayR));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBroadcast A->BA over sfBA (sfB o sfA)\n"));
  PetscCall(VecView(ba, NULL));

  PetscCall(VecDestroy(&ba));
  PetscCall(VecDestroy(&b));
  PetscCall(VecDestroy(&a));

  /* View the graphs last so the numeric results appear first in the output. */
  PetscCall(PetscSFView(sfA, NULL));
  PetscCall(PetscSFView(sfB, NULL));
  PetscCall(PetscSFView(sfBA, NULL));
  PetscCall(PetscSFDestroy(&sfA));
  PetscCall(PetscSFDestroy(&sfB));
  PetscCall(PetscSFDestroy(&sfBA));

  PetscCall(PetscFinalize());
  return 0;
}
133 
134 /*TEST
135 
136    test:
137      suffix: 1
138 
139    test:
140      suffix: 2
141      filter: grep -v "type" | grep -v "sort"
142      args: -sparse_sfB
143 
144    test:
145      suffix: 2_window
146      filter: grep -v "type" | grep -v "sort"
147      output_file: output/ex4_2.out
148      args: -sparse_sfB -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
149      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)
150 
151    # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
152    test:
153      suffix: 2_window_shared
154      filter: grep -v "type" | grep -v "sort"
155      output_file: output/ex4_2.out
156      args: -sparse_sfB -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
157      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED)
158 
159 TEST*/
160