xref: /petsc/src/vec/is/sf/tests/ex6.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
/* Help text passed to PetscInitialize() below; shown by the -help option. */
static char help[]= "  Test VecScatter with x, y on different communicators\n\n";
2 
3 #include <petscvec.h>
4 
5 int main(int argc,char **argv)
6 {
7   PetscErrorCode     ierr;
8   PetscInt           i,n=5,rstart;
9   PetscScalar        *val;
10   const PetscScalar  *dat;
11   Vec                x,y1,y2;
12   MPI_Comm           newcomm;
13   PetscMPIInt        nproc,rank;
14   IS                 ix;
15   VecScatter         vscat1,vscat2;
16 
17   PetscFunctionBegin;
18   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
19   CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&nproc));
20   CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
21 
22   PetscCheckFalse(nproc != 2,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This test must run with exactly two MPI ranks");
23 
24   /* Create MPI vectors x and y, which are on the same comm (i.e., MPI_IDENT) */
25   CHKERRQ(VecCreateMPI(PETSC_COMM_WORLD,n,PETSC_DECIDE,&x));
26   CHKERRQ(VecDuplicate(x,&y1));
27   CHKERRQ(VecGetOwnershipRange(x,&rstart,NULL));
28 
29   /* Set x's value locally. x would be {0., 1., 2., ..., 9.} */
30   CHKERRQ(VecGetArray(x,&val));
31   for (i=0; i<n; i++) val[i] = rstart + i;
32   CHKERRQ(VecRestoreArray(x,&val));
33 
34   /* Create index set ix = {0, 1, 2, ..., 9} */
35   CHKERRQ(ISCreateStride(PETSC_COMM_WORLD,n,rstart,1,&ix));
36 
37   /* Create newcomm that reverses processes in x's comm, and then create y2 on it*/
38   CHKERRMPI(MPI_Comm_split(PETSC_COMM_WORLD,0/*color*/,-rank/*key*/,&newcomm));
39   CHKERRQ(VecCreateMPI(newcomm,n,PETSC_DECIDE,&y2));
40 
41   /* It looks vscat1/2 are the same, but actually not. y2 is on a different communicator than x */
42   CHKERRQ(VecScatterCreate(x,ix,y1,ix,&vscat1));
43   CHKERRQ(VecScatterCreate(x,ix,y2,ix,&vscat2));
44 
45   CHKERRQ(VecScatterBegin(vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD));
46   CHKERRQ(VecScatterBegin(vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD));
47   CHKERRQ(VecScatterEnd  (vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD));
48   CHKERRQ(VecScatterEnd  (vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD));
49 
50   /* View on rank 0 of x's comm, which is PETSC_COMM_WORLD */
51   if (rank == 0) {
52     /* Print the part of x on rank 0, which is 0 1 2 3 4 */
53     CHKERRQ(PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, x  = "));
54     CHKERRQ(VecGetArrayRead(x,&dat));
55     for (i=0; i<n; i++) CHKERRQ(PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i])));
56     CHKERRQ(VecRestoreArrayRead(x,&dat));
57 
58     /* Print the part of y1 on rank 0, which is 0 1 2 3 4 */
59     CHKERRQ(PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y1 = "));
60     CHKERRQ(VecGetArrayRead(y1,&dat));
61     for (i=0; i<n; i++) CHKERRQ(PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i])));
62     CHKERRQ(VecRestoreArrayRead(y1,&dat));
63 
64     /* Print the part of y2 on rank 0, which is 5 6 7 8 9 since y2 swapped the processes of PETSC_COMM_WORLD */
65     CHKERRQ(PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y2 = "));
66     CHKERRQ(VecGetArrayRead(y2,&dat));
67     for (i=0; i<n; i++) CHKERRQ(PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i])));
68     CHKERRQ(VecRestoreArrayRead(y2,&dat));
69     CHKERRQ(PetscPrintf(PETSC_COMM_SELF,"\n"));
70   }
71 
72   CHKERRQ(ISDestroy(&ix));
73   CHKERRQ(VecDestroy(&x));
74   CHKERRQ(VecDestroy(&y1));
75   CHKERRQ(VecDestroy(&y2));
76   CHKERRQ(VecScatterDestroy(&vscat1));
77   CHKERRQ(VecScatterDestroy(&vscat2));
78   CHKERRMPI(MPI_Comm_free(&newcomm));
79   ierr = PetscFinalize();
80   return ierr;
81 }
82 
83 /*TEST
84 
85    test:
86       nsize: 2
87       requires: double
88 TEST*/
89