static const char help[] = "Test overlapped communication on a single star forest (PetscSF)\n\n";

#include <petscvec.h>
#include <petscsf.h>
#include <petscviewer.h>

int main(int argc, char **argv)
{
  PetscErrorCode ierr;
  PetscSF        sf;
  Vec            A,Aout;
  Vec            B,Bout;
  PetscScalar   *bufA;
  PetscScalar   *bufAout;
  PetscScalar   *bufB;
  PetscScalar   *bufBout;
  PetscMPIInt    rank, size;
  PetscInt       nroots, nleaves;
  PetscInt       i;
  PetscInt      *ilocal;
  PetscSFNode   *iremote;

  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);

  if (size != 2) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Only coded for two MPI processes");

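  /* Create the star forest; the communication implementation (e.g. -sf_type basic or -sf_type window)
     is selected at runtime through the options database */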
  ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);

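  /* Each rank contributes one root and two leaves; the leaves live at local indices 0 and 1 */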
  nleaves = 2;
  nroots = 1;
  ierr = PetscMalloc1(nleaves,&ilocal);CHKERRQ(ierr);

  for (i = 0; i<nleaves; i++) {
    ilocal[i] = i;
  }

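  /* Leaf 0 references the root owned by this rank; leaf 1 references the root owned by the other rank */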
  ierr = PetscMalloc1(nleaves,&iremote);CHKERRQ(ierr);
  if (rank == 0) {
    iremote[0].rank = 0;
    iremote[0].index = 0;
    iremote[1].rank = 1;
    iremote[1].index = 0;
  } else {
    iremote[0].rank = 1;
    iremote[0].index = 0;
    iremote[1].rank = 0;
    iremote[1].index = 0;
  }
  ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
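  /* Create work vectors with two local entries each: A and B supply the root data (only local
     index 0 is a root on each rank), while Aout and Bout receive the values at the two leaves */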
  ierr = VecCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = VecSetSizes(A,2,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(A);CHKERRQ(ierr);
  ierr = VecSetUp(A);CHKERRQ(ierr);

  ierr = VecDuplicate(A,&B);CHKERRQ(ierr);
  ierr = VecDuplicate(A,&Aout);CHKERRQ(ierr);
  ierr = VecDuplicate(A,&Bout);CHKERRQ(ierr);
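  /* Fill A with the owning rank and B with the rank plus 10; the offset makes the two
     overlapped broadcasts distinguishable in the output */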
  ierr = VecGetArray(A,&bufA);CHKERRQ(ierr);
  ierr = VecGetArray(B,&bufB);CHKERRQ(ierr);
  for (i=0; i<2; i++) {
    bufA[i] = (PetscScalar)rank;
    bufB[i] = (PetscScalar)(rank) + 10.0;
  }
  ierr = VecRestoreArray(A,&bufA);CHKERRQ(ierr);
  ierr = VecRestoreArray(B,&bufB);CHKERRQ(ierr);

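  /* Broadcast root values to the leaves. Both broadcasts are started before either is ended,
     so two communications on the same SF are in flight at once (the overlap being tested) */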
  ierr = VecGetArrayRead(A,(const PetscScalar**)&bufA);CHKERRQ(ierr);
  ierr = VecGetArrayRead(B,(const PetscScalar**)&bufB);CHKERRQ(ierr);
  ierr = VecGetArray(Aout,&bufAout);CHKERRQ(ierr);
  ierr = VecGetArray(Bout,&bufBout);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,(const void*)bufA,(void *)bufAout);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,(const void*)bufB,(void *)bufBout);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,(const void*)bufA,(void *)bufAout);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,(const void*)bufB,(void *)bufBout);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(A,(const PetscScalar**)&bufA);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(B,(const PetscScalar**)&bufB);CHKERRQ(ierr);
  ierr = VecRestoreArray(Aout,&bufAout);CHKERRQ(ierr);
  ierr = VecRestoreArray(Bout,&bufBout);CHKERRQ(ierr);

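  /* After the broadcasts, each leaf entry of Aout and Bout holds the value of the root it references */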
  ierr = VecView(Aout,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecView(Bout,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecDestroy(&A);CHKERRQ(ierr);
  ierr = VecDestroy(&B);CHKERRQ(ierr);
  ierr = VecDestroy(&Aout);CHKERRQ(ierr);
  ierr = VecDestroy(&Bout);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return ierr;
}

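/*
   Example run (the launcher name may differ by MPI installation):
     mpiexec -n 2 ./ex2 -sf_type window
*/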
/*TEST

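   # The 'basic' (two-sided) implementation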
   test:
      suffix: basic
      nsize: 2
      filter: grep -v "type" | grep -v "sort"
      args: -sf_type basic

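   # Same expected output using MPI one-sided (window) communication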
   test:
      suffix: window
      nsize: 2
      filter: grep -v "type" | grep -v "sort"
      output_file: output/ex2_basic.out
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: define(PETSC_HAVE_MPI_ONE_SIDED)

   # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
   test:
      suffix: window_shared
      nsize: 2
      filter: grep -v "type" | grep -v "sort"
      output_file: output/ex2_basic.out
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
      requires: define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !define(PETSC_HAVE_MPICH_NUMVERSION) define(PETSC_HAVE_MPI_ONE_SIDED)

TEST*/