static char help[] = "Test event log of VecScatter with various block sizes\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscInt           i,j,low,high,n=256,N,errors,tot_errors;
  PetscInt           bs=1,ix[2],iy[2];
  PetscMPIInt        nproc,rank;
  PetscScalar       *xval;
  const PetscScalar *yval;
  Vec                x,y;
  IS                 isx,isy;
  VecScatter         ctx;
  const PetscInt     niter = 10;
#if defined(PETSC_USE_LOG)
  PetscLogStage      stage1,stage2;
  PetscLogEvent      event1,event2;
  PetscLogDouble     numMessages,messageLength;
  PetscEventPerfInfo eventInfo;
  PetscInt           tot_msg,tot_len,avg_len;
#endif

  PetscFunctionBegin;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscLogDefaultBegin());
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&nproc));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

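  /* Register a separate stage and event for each block size so that their messages
     can be examined independently in the log */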
  PetscCall(PetscLogStageRegister("Scatter(bs=1)", &stage1));
  PetscCall(PetscLogEventRegister("VecScatter(bs=1)", PETSC_OBJECT_CLASSID, &event1));
  PetscCall(PetscLogStageRegister("Scatter(bs=4)", &stage2));
  PetscCall(PetscLogEventRegister("VecScatter(bs=4)", PETSC_OBJECT_CLASSID, &event2));

  /* Create a parallel vector x and a sequential vector y */
  PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
  PetscCall(VecSetSizes(x,n,PETSC_DECIDE));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecGetOwnershipRange(x,&low,&high));
  PetscCall(VecGetSize(x,&N));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,n,&y));

  /*=======================================
     test VecScatter with bs = 1
    ======================================*/

  /* The code works as if we were doing 3-point stencil computations on a 1D domain x
     with periodic boundary conditions, except that the two ghost points are scattered
     to the beginning of y.
   */
  bs    = 1;
  ix[0] = rank? low-1 : N-1; /* ix[] contains global indices of the two ghost points */
  ix[1] = (rank != nproc-1)? high : 0;
  iy[0] = 0;
  iy[1] = 1;
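  /* For example, with nproc = 4 and n = 256, rank 1 owns [256,512) and gets
     ix = {255, 512}: the last entry of its left neighbor and the first entry of its
     right neighbor, placed at the first two entries of y */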

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,2,ix,PETSC_COPY_VALUES,&isx));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,2,iy,PETSC_COPY_VALUES,&isy));
  PetscCall(VecScatterCreate(x,isx,y,isy,&ctx));
  /* Call SetUp explicitly; otherwise messages from the implicit SetUp would be counted in the events below */
  PetscCall(VecScatterSetUp(ctx));

  PetscCall(PetscLogStagePush(stage1));
  PetscCall(PetscLogEventBegin(event1,0,0,0,0));
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    PetscCall(VecGetArray(x,&xval));
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    PetscCall(VecRestoreArray(x,&xval));
    /* scatter the ghosts to y */
    PetscCall(VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD));
    /* check that y received the correct values: y[k] should hold x[ix[k]] = ix[k]+i */
    PetscCall(VecGetArrayRead(y,&yval));
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0]+i) errors++;
    if ((PetscInt)PetscRealPart(yval[1]) != ix[1]+i) errors++;
    PetscCall(VecRestoreArrayRead(y,&yval));
  }
  PetscCall(PetscLogEventEnd(event1,0,0,0,0));
  PetscCall(PetscLogStagePop());

  /* check whether wrong values were found on any process */
  PetscCallMPI(MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD));
  if (tot_errors) PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %" PetscInt_FMT "\n",bs));

  /* print out event log of VecScatter(bs=1) */
#if defined(PETSC_USE_LOG)
  PetscCall(PetscLogEventGetPerfInfo(stage1,event1,&eventInfo));
  PetscCallMPI(MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD));
  PetscCallMPI(MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD));
  tot_msg = (PetscInt)(numMessages*0.5); /* each message is counted twice in the sum: once by the sender (Send) and once by the receiver (Recv) */
  tot_len = (PetscInt)(messageLength*0.5);
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter and tot_len = tot_msg*sizeof(PetscScalar)*bs */
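  /* e.g., with nproc = 4, niter = 10, and 8-byte PetscScalar: tot_msg = 80 and tot_len = 640 bytes, so avg_len = 8 bytes */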
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n",bs,tot_msg,tot_len,avg_len));
#endif

  PetscCall(ISDestroy(&isx));
  PetscCall(ISDestroy(&isy));
  PetscCall(VecScatterDestroy(&ctx));

  /*=======================================
     test VecScatter with bs = 4
    ======================================*/

  /* similar to the 3-point stencil above, except that each ghost is now a block of bs entries */
  bs    = 4; /* n must be a multiple of bs for the block indexing below to work */
  ix[0] = rank? low/bs-1 : N/bs-1; /* ix[] contains global block indices of the two ghost blocks */
  ix[1] = (rank != nproc-1)? high/bs : 0;
  iy[0] = 0;
  iy[1] = 1;
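  /* For example, with nproc = 4, n = 256, and bs = 4, rank 1 owns blocks [64,128) and
     gets ix = {63, 128}: the neighbors' adjacent blocks, placed at the first two blocks of y */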

  PetscCall(ISCreateBlock(PETSC_COMM_SELF,bs,2,ix,PETSC_COPY_VALUES,&isx));
  PetscCall(ISCreateBlock(PETSC_COMM_SELF,bs,2,iy,PETSC_COPY_VALUES,&isy));

  PetscCall(VecScatterCreate(x,isx,y,isy,&ctx));
  /* Call SetUp explicitly; otherwise messages from the implicit SetUp would be counted in the events below */
  PetscCall(VecScatterSetUp(ctx));

  PetscCall(PetscLogStagePush(stage2));
  PetscCall(PetscLogEventBegin(event2,0,0,0,0));
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    PetscCall(VecGetArray(x,&xval));
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    PetscCall(VecRestoreArray(x,&xval));
    /* scatter the ghost blocks to y */
    PetscCall(VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD));
    /* check the first entry of each ghost block: y[k*bs] should hold x[ix[k]*bs] = ix[k]*bs+i */
    PetscCall(VecGetArrayRead(y,&yval));
    if ((PetscInt)PetscRealPart(yval[0])  != ix[0]*bs+i) errors++;
    if ((PetscInt)PetscRealPart(yval[bs]) != ix[1]*bs+i) errors++;
    PetscCall(VecRestoreArrayRead(y,&yval));
  }
  PetscCall(PetscLogEventEnd(event2,0,0,0,0));
  PetscCall(PetscLogStagePop());

  /* check whether wrong values were found on any process */
  PetscCallMPI(MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD));
  if (tot_errors) PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %" PetscInt_FMT "\n",bs));

  /* print out event log of VecScatter(bs=4) */
#if defined(PETSC_USE_LOG)
  PetscCall(PetscLogEventGetPerfInfo(stage2,event2,&eventInfo));
  PetscCallMPI(MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD));
  PetscCallMPI(MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD));
  tot_msg = (PetscInt)(numMessages*0.5); /* each message is counted twice in the sum: once by the sender (Send) and once by the receiver (Recv) */
  tot_len = (PetscInt)(messageLength*0.5);
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter and tot_len = tot_msg*sizeof(PetscScalar)*bs */
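  /* e.g., with nproc = 4, niter = 10, bs = 4, and 8-byte PetscScalar: tot_msg = 80 and tot_len = 2560 bytes, so avg_len = 32 bytes */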
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n",bs,tot_msg,tot_len,avg_len));
#endif

  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Program finished\n"));
  PetscCall(ISDestroy(&isx));
  PetscCall(ISDestroy(&isy));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      args:
      requires: double defined(PETSC_USE_LOG)

TEST*/