xref: /petsc/src/vec/vec/tests/ex23.c (revision 4302210d98c281cf4e8bc0fee406cf222e373a4c)
1 
/* Help text printed by PetscInitialize() when the test is run with -help */
static char help[] = "Scatters from a parallel vector to a sequential vector.\n\
  Using a blocked send and a strided receive.\n\n";

/*
   Global vector layout (blocks of 4, '|' marks the rank-0/rank-1 split):

        0 1 2 3 | 4 5 6 7 ||  8 9 10 11

     Scatter first and third block to first processor and
     second and third block to second processor
*/
11 
12 #include <petscvec.h>
13 
14 int main(int argc,char **argv)
15 {
16   PetscErrorCode ierr;
17   PetscInt       i,blocks[2],nlocal;
18   PetscMPIInt    size,rank;
19   PetscScalar    value;
20   Vec            x,y;
21   IS             is1,is2;
22   VecScatter     ctx = 0;
23   PetscViewer    subviewer;
24 
25   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
26   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
27   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
28 
29   if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 2 processors");
30 
31   /* create two vectors */
32   if (!rank) nlocal = 8;
33   else nlocal = 4;
34   ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
35   ierr = VecSetSizes(x,nlocal,12);CHKERRQ(ierr);
36   ierr = VecSetFromOptions(x);CHKERRQ(ierr);
37   ierr = VecCreate(PETSC_COMM_SELF,&y);CHKERRQ(ierr);
38   ierr = VecSetSizes(y,8,PETSC_DECIDE);CHKERRQ(ierr);
39   ierr = VecSetFromOptions(y);CHKERRQ(ierr);
40 
41   /* create two index sets */
42   if (!rank) {
43     blocks[0] = 0; blocks[1] = 2;
44   } else {
45     blocks[0] = 1; blocks[1] = 2;
46   }
47   ierr = ISCreateBlock(PETSC_COMM_SELF,4,2,blocks,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
48   ierr = ISCreateStride(PETSC_COMM_SELF,8,0,1,&is2);CHKERRQ(ierr);
49 
50   for (i=0; i<12; i++) {
51     value = i;
52     ierr  = VecSetValues(x,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
53   }
54   ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
55   ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
56 
57   ierr = VecScatterCreate(x,is1,y,is2,&ctx);CHKERRQ(ierr);
58   ierr = VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
59   ierr = VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
60   ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
61 
62   ierr = PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&subviewer);CHKERRQ(ierr);
63   ierr = VecView(y,subviewer);CHKERRQ(ierr);
64   ierr = PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&subviewer);CHKERRQ(ierr);
65 
66   ierr = VecDestroy(&x);CHKERRQ(ierr);
67   ierr = VecDestroy(&y);CHKERRQ(ierr);
68   ierr = ISDestroy(&is1);CHKERRQ(ierr);
69   ierr = ISDestroy(&is2);CHKERRQ(ierr);
70 
71   ierr = PetscFinalize();
72   return ierr;
73 }
74 
75 
76 
77 /*TEST
78 
79    testset:
80       nsize: 2
81       output_file: output/ex23_1.out
82       filter: grep -v "  type:"
83       test:
84         suffix: standard
85         args: -vec_type standard
86       test:
87         requires: cuda
88         suffix: cuda
89         args: -vec_type cuda
90       test:
91         requires: viennacl
        suffix: viennacl
93         args: -vec_type viennacl
94       test:
95         requires: kokkos_kernels
96         suffix: kokkos
97         args: -vec_type kokkos
98 TEST*/
99