xref: /petsc/src/vec/is/sf/tests/ex8.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
/* Usage text shown by -help; passed to PetscInitialize() below. */
static char help[] = "Test VecScatterCreateToZero, VecScatterCreateToAll\n\n";
2 
3 #include <petscvec.h>
4 int main(int argc, char **argv) {
5   PetscInt    i, N = 10, low, high;
6   PetscMPIInt size, rank;
7   Vec         x, y;
8   VecScatter  vscat;
9 
10   PetscFunctionBeginUser;
11   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
12   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
13   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
14 
15   PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
16   PetscCall(VecSetFromOptions(x));
17   PetscCall(VecSetSizes(x, PETSC_DECIDE, N));
18   PetscCall(VecGetOwnershipRange(x, &low, &high));
19   PetscCall(PetscObjectSetName((PetscObject)x, "x"));
20 
21   /*-------------------------------------*/
22   /*       VecScatterCreateToZero        */
23   /*-------------------------------------*/
24 
25   /* MPI vec x = [0, 1, 2, .., N-1] */
26   for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
27   PetscCall(VecAssemblyBegin(x));
28   PetscCall(VecAssemblyEnd(x));
29 
30   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting VecScatterCreateToZero\n"));
31   PetscCall(VecScatterCreateToZero(x, &vscat, &y));
32   PetscCall(PetscObjectSetName((PetscObject)y, "y"));
33 
34   /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on rank 0 */
35   PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
36   PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
37   if (rank == 0) PetscCall(VecView(y, PETSC_VIEWER_STDOUT_SELF));
38 
39   /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
40   PetscCall(VecScatterBegin(vscat, x, y, ADD_VALUES, SCATTER_FORWARD));
41   PetscCall(VecScatterEnd(vscat, x, y, ADD_VALUES, SCATTER_FORWARD));
42   if (rank == 0) PetscCall(VecView(y, PETSC_VIEWER_STDOUT_SELF));
43 
44   /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
45   PetscCall(VecScatterBegin(vscat, y, x, INSERT_VALUES, SCATTER_REVERSE));
46   PetscCall(VecScatterEnd(vscat, y, x, INSERT_VALUES, SCATTER_REVERSE));
47   PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
48 
49   /* Test PetscSFReduce with op = MPI_SUM, which does x += y on x's local part on rank 0*/
50   PetscCall(VecScatterBegin(vscat, y, x, ADD_VALUES, SCATTER_REVERSE_LOCAL));
51   PetscCall(VecScatterEnd(vscat, y, x, ADD_VALUES, SCATTER_REVERSE_LOCAL));
52   PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
53 
54   PetscCall(VecDestroy(&y));
55   PetscCall(VecScatterDestroy(&vscat));
56 
57   /*-------------------------------------*/
58   /*       VecScatterCreateToAll         */
59   /*-------------------------------------*/
60   for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
61   PetscCall(VecAssemblyBegin(x));
62   PetscCall(VecAssemblyEnd(x));
63 
64   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nTesting VecScatterCreateToAll\n"));
65 
66   PetscCall(VecScatterCreateToAll(x, &vscat, &y));
67   PetscCall(PetscObjectSetName((PetscObject)y, "y"));
68 
69   /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on all ranks */
70   PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
71   PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
72   if (rank == 0) PetscCall(VecView(y, PETSC_VIEWER_STDOUT_SELF));
73 
74   /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
75   PetscCall(VecScatterBegin(vscat, x, y, ADD_VALUES, SCATTER_FORWARD));
76   PetscCall(VecScatterEnd(vscat, x, y, ADD_VALUES, SCATTER_FORWARD));
77   if (rank == 0) PetscCall(VecView(y, PETSC_VIEWER_STDOUT_SELF));
78 
79   /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
80   PetscCall(VecScatterBegin(vscat, y, x, INSERT_VALUES, SCATTER_REVERSE));
81   PetscCall(VecScatterEnd(vscat, y, x, INSERT_VALUES, SCATTER_REVERSE));
82   PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
83 
84   /* Test PetscSFReduce with op = MPI_SUM, which does x += size*y */
85   PetscCall(VecScatterBegin(vscat, y, x, ADD_VALUES, SCATTER_REVERSE));
86   PetscCall(VecScatterEnd(vscat, y, x, ADD_VALUES, SCATTER_REVERSE));
87   PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
88   PetscCall(VecDestroy(&x));
89   PetscCall(VecDestroy(&y));
90   PetscCall(VecScatterDestroy(&vscat));
91 
92   PetscCall(PetscFinalize());
93   return 0;
94 }
95 
96 /*TEST
97 
98    testset:
99       # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
100       nsize: 2
101       # Exact numbers really matter here
102       diff_args: -j
103       filter: grep -v "type"
104       output_file: output/ex8_1.out
105 
106       test:
107         suffix: 1_standard
108 
109       test:
110         suffix: 1_cuda
111         # sf_backend cuda is not needed if compiling only with cuda
112         args: -vec_type cuda -sf_backend cuda
113         requires: cuda
114 
      test:
        suffix: 1_hip
        # sf_backend hip is not needed if compiling only with hip
        args: -vec_type hip -sf_backend hip
        requires: hip
119 
120       test:
121         suffix: 1_cuda_aware_mpi
122         # sf_backend cuda is not needed if compiling only with cuda
123         args: -vec_type cuda -sf_backend cuda
124         requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)
125 
126    testset:
127       # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
128       nsize: 3
129       # Exact numbers really matter here
130       diff_args: -j
131       filter: grep -v "type"
132       output_file: output/ex8_2.out
133 
134       test:
135         suffix: 2_standard
136 
137       test:
138         suffix: 2_cuda
139         # sf_backend cuda is not needed if compiling only with cuda
140         args: -vec_type cuda -sf_backend cuda
141         requires: cuda
142 
143       test:
144         suffix: 2_hip
145         # sf_backend hip is not needed if compiling only with hip
146         args: -vec_type hip -sf_backend hip
147         requires: hip
148 
149       test:
150         suffix: 2_cuda_aware_mpi
151         args: -vec_type cuda
152         requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)
153 
154 TEST*/
155