static char help[] = "  Test VecScatter with x, y on different communicators\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscInt           i,n=5,rstart;
  PetscScalar        *val;
  const PetscScalar  *dat;
  Vec                x,y1,y2;
  MPI_Comm           newcomm;
  PetscMPIInt        nproc,rank;
  IS                 ix;
  VecScatter         vscat1,vscat2;

  PetscFunctionBegin;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&nproc));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

  PetscCheck(nproc == 2,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This test must run with exactly two MPI ranks");

  /* Create MPI vectors x and y1, which are on the same comm (i.e., MPI_IDENT, as the check below illustrates) */
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD,n,PETSC_DECIDE,&x));
  PetscCall(VecDuplicate(x,&y1));
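
  /* Illustrative check (an addition for exposition, not part of the original test):
     since VecDuplicate() puts y1 on x's communicator, MPI_Comm_compare() on the two
     handles should report MPI_IDENT. */
  {
    MPI_Comm    xcomm,y1comm;
    PetscMPIInt cmp;

    PetscCall(PetscObjectGetComm((PetscObject)x,&xcomm));
    PetscCall(PetscObjectGetComm((PetscObject)y1,&y1comm));
    PetscCallMPI(MPI_Comm_compare(xcomm,y1comm,&cmp));
    PetscCheck(cmp == MPI_IDENT,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Expected x and y1 to be on identical communicators");
  }
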
  PetscCall(VecGetOwnershipRange(x,&rstart,NULL));

  /* Set x's entries locally; globally x will be {0., 1., 2., ..., 9.} */
  PetscCall(VecGetArray(x,&val));
  for (i=0; i<n; i++) val[i] = rstart + i;
  PetscCall(VecRestoreArray(x,&val));

  /* Create index set ix = {0, 1, 2, ..., 9} */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD,n,rstart,1,&ix));

  /* Create newcomm, which reverses the rank order of x's comm, and then create y2 on it */
  PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD,0/*color*/,-rank/*key*/,&newcomm));
  PetscCall(VecCreateMPI(newcomm,n,PETSC_DECIDE,&y2));
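
  /* Hedged sanity checks (illustrative additions, not part of the original test):
     MPI_Comm_split() orders ranks by ascending key, and we used key = -rank, so the
     two processes swap places: rank r of PETSC_COMM_WORLD becomes rank nproc-1-r of
     newcomm. The two communicators thus contain the same processes in a different
     order, which MPI_Comm_compare() reports as MPI_SIMILAR. */
  {
    PetscMPIInt newrank,cmp;

    PetscCallMPI(MPI_Comm_rank(newcomm,&newrank));
    PetscCheck(newrank == nproc-1-rank,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Unexpected rank order in newcomm");
    PetscCallMPI(MPI_Comm_compare(PETSC_COMM_WORLD,newcomm,&cmp));
    PetscCheck(cmp == MPI_SIMILAR,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Expected newcomm to be MPI_SIMILAR to PETSC_COMM_WORLD");
  }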

  /* vscat1 and vscat2 look identical, but they are not: y2 lives on a different communicator than x */
  PetscCall(VecScatterCreate(x,ix,y1,ix,&vscat1));
  PetscCall(VecScatterCreate(x,ix,y2,ix,&vscat2));

  PetscCall(VecScatterBegin(vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterBegin(vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD));
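
  /* Optional verification (an illustrative addition, not part of the original test):
     with the identity index set ix on both sides, vscat1 should have copied x into
     y1 entrywise. */
  {
    PetscBool equal;

    PetscCall(VecEqual(x,y1,&equal));
    PetscCheck(equal,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"y1 should equal x after the scatter");
  }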

  /* View on rank 0 of x's comm, which is PETSC_COMM_WORLD */
  if (rank == 0) {
    /* Print the part of x on rank 0, which is 0 1 2 3 4 */
    PetscCall(PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, x  = "));
    PetscCall(VecGetArrayRead(x,&dat));
    for (i=0; i<n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(x,&dat));

    /* Print the part of y1 on rank 0, which is 0 1 2 3 4 */
    PetscCall(PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y1 = "));
    PetscCall(VecGetArrayRead(y1,&dat));
    for (i=0; i<n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(y1,&dat));

    /* Print the part of y2 on rank 0, which is 5 6 7 8 9 since newcomm reverses the rank order of PETSC_COMM_WORLD */
    PetscCall(PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y2 = "));
    PetscCall(VecGetArrayRead(y2,&dat));
    for (i=0; i<n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(y2,&dat));
    PetscCall(PetscPrintf(PETSC_COMM_SELF,"\n"));
  }

  PetscCall(ISDestroy(&ix));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y1));
  PetscCall(VecDestroy(&y2));
  PetscCall(VecScatterDestroy(&vscat1));
  PetscCall(VecScatterDestroy(&vscat2));
  PetscCallMPI(MPI_Comm_free(&newcomm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2
      requires: double
TEST*/