xref: /petsc/src/vec/is/sf/tests/ex8.c (revision 2fa40bb9206b96114faa7cb222621ec184d31cd2) !
1 static char help[]= "Test VecScatterCreateToZero, VecScatterCreateToAll\n\n";
2 
3 #include <petscvec.h>
/*
   Exercises VecScatterCreateToZero and VecScatterCreateToAll.

   For each scatter the test drives both communication directions:
     - forward (SCATTER_FORWARD, backed by PetscSFBcastAndOp) with
       INSERT_VALUES (MPI_REPLACE) and ADD_VALUES (MPI_SUM), and
     - reverse (SCATTER_REVERSE / SCATTER_REVERSE_LOCAL, backed by
       PetscSFReduce) with INSERT_VALUES and ADD_VALUES.
   Vectors are printed after each step so the harness can diff the output
   against the reference files (exact numbers matter — see TEST section).
*/
4 int main(int argc,char **argv)
5 {
6   PetscErrorCode     ierr;               /* error code checked after every PETSc/MPI call */
7   PetscInt           i,N=10,low,high;    /* N: global length of x; [low,high): this rank's ownership range */
8   PetscMPIInt        size,rank;
9   Vec                x,y;                /* x: parallel source vector; y: gathered target (seq on rank 0, or seq on all ranks) */
10   VecScatter         vscat;
11 
12   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
13   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
14   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
15 
16   ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
17   ierr = VecSetFromOptions(x);CHKERRQ(ierr);
18   ierr = VecSetSizes(x,PETSC_DECIDE,N);CHKERRQ(ierr);
19   ierr = VecGetOwnershipRange(x,&low,&high);CHKERRQ(ierr);
20   ierr = PetscObjectSetName((PetscObject)x,"x");CHKERRQ(ierr);
21 
22   /*-------------------------------------*/
23   /*       VecScatterCreateToZero        */
24   /*-------------------------------------*/
25 
26   /* MPI vec x = [0, 1, 2, .., N-1]; each rank fills only its owned entries */
27   for (i=low; i<high; i++) {ierr = VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);CHKERRQ(ierr);}
28   ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
29   ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
30 
31   ierr = PetscPrintf(PETSC_COMM_WORLD,"\nTesting VecScatterCreateToZero\n");CHKERRQ(ierr);
32   ierr = VecScatterCreateToZero(x,&vscat,&y);CHKERRQ(ierr);
33   ierr = PetscObjectSetName((PetscObject)y,"y");CHKERRQ(ierr);
34 
35   /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on rank 0 */
36   ierr = VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
37   ierr = VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
38   if (rank == 0) {ierr = VecView(y,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);} /* y is sequential; only rank 0 holds data */
39 
40   /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
41   ierr = VecScatterBegin(vscat,x,y,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
42   ierr = VecScatterEnd(vscat,x,y,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
43   if (rank == 0) {ierr = VecView(y,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}
44 
45   /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
46   ierr = VecScatterBegin(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
47   ierr = VecScatterEnd(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
48   ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
49 
50   /* Test PetscSFReduce with op = MPI_SUM, which does x += y on x's local part on rank 0*/
51   ierr = VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
52   ierr = VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
53   ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
54 
55   /* y and vscat belong to the ToZero scatter; release before building the ToAll pair */
56   ierr = VecDestroy(&y);CHKERRQ(ierr);
57   ierr = VecScatterDestroy(&vscat);CHKERRQ(ierr);
58 
59   /*-------------------------------------*/
60   /*       VecScatterCreateToAll         */
61   /*-------------------------------------*/
62   /* Reset x = [0, 1, 2, .., N-1], since the reverse scatters above modified it */
63   for (i=low; i<high; i++) {ierr = VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);CHKERRQ(ierr);}
64   ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
65   ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
66 
67   ierr = PetscPrintf(PETSC_COMM_WORLD,"\nTesting VecScatterCreateToAll\n");CHKERRQ(ierr);
68 
69   ierr = VecScatterCreateToAll(x,&vscat,&y);CHKERRQ(ierr);
70   ierr = PetscObjectSetName((PetscObject)y,"y");CHKERRQ(ierr);
71 
72   /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on all ranks */
73   ierr = VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
74   ierr = VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
75   if (rank == 0) {ierr = VecView(y,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);} /* every rank has a copy; print just one */
76 
77   /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
78   ierr = VecScatterBegin(vscat,x,y,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
79   ierr = VecScatterEnd(vscat,x,y,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
80   if (rank == 0) {ierr = VecView(y,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}
81 
82   /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
83   ierr = VecScatterBegin(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
84   ierr = VecScatterEnd(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
85   ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
86 
87   /* Test PetscSFReduce with op = MPI_SUM, which does x += size*y (every rank contributes its full copy of y) */
88   ierr = VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
89   ierr = VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
90   ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
91   ierr = VecDestroy(&x);CHKERRQ(ierr);
92   ierr = VecDestroy(&y);CHKERRQ(ierr);
93   ierr = VecScatterDestroy(&vscat);CHKERRQ(ierr);
94 
95   ierr = PetscFinalize();
96   return ierr;
97 }
96 
97 /*TEST
98 
99    testset:
100       # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
101       nsize: 2
102       # Exact numbers really matter here
103       diff_args: -j
104       filter: grep -v "type"
105       output_file: output/ex8_1.out
106 
107       test:
108         suffix: 1_standard
109 
110       test:
111         suffix: 1_cuda
112         # sf_backend cuda is not needed if compiling only with cuda
113         args: -vec_type cuda -sf_backend cuda -vecscatter_packongpu true
114         requires: cuda
115 
116       test:
117         suffix: 1_hip
118         # sf_backend hip is not needed if compiling only with hip
119         args: -vec_type hip -sf_backend hip -vecscatter_packongpu true
120         requires: hip
120 
121       test:
122         suffix: 1_cuda_aware_mpi
123         # sf_backend cuda is not needed if compiling only with cuda
124         args: -vec_type cuda -sf_backend cuda -vecscatter_packongpu false
125         requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)
126 
127    testset:
128       # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
129       nsize: 3
130       # Exact numbers really matter here
131       diff_args: -j
132       filter: grep -v "type"
133       output_file: output/ex8_2.out
134 
135       test:
136         suffix: 2_standard
137 
138       test:
139         suffix: 2_cuda
140         # sf_backend cuda is not needed if compiling only with cuda
141         args: -vec_type cuda -sf_backend cuda -vecscatter_packongpu true
142         requires: cuda
143 
144       test:
145         suffix: 2_hip
146         # sf_backend hip is not needed if compiling only with hip
147         args: -vec_type hip -sf_backend hip -vecscatter_packongpu true
148         requires: hip
149 
150       test:
151         suffix: 2_cuda_aware_mpi
152         args: -vec_type cuda -vecscatter_packongpu false
153         requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)
154 
155 TEST*/
156 
157