xref: /petsc/src/vec/is/sf/tests/ex9.c (revision b698fc57f0bea7237255b29c1b77df0acc362ffd)
/* Help text printed by PetscInitialize() when the example is run with -help */
static char help[]= "This example shows 1) how to transfer vectors from a parent communicator to vectors on a child communicator and vice versa;\n\
  2) how to transfer vectors from a subcommunicator to vectors on another subcommunicator. The two subcommunicators are not\n\
  required to cover all processes in PETSC_COMM_WORLD; 3) how to copy a vector from a parent communicator to vectors on its child communicators.\n\n";
4 
5 #include <petscvec.h>
/*
  Demonstrates three vector-transfer patterns across communicators, each selected
  by a command-line flag (-world2sub, -sub2sub, -world2subs). PETSC_COMM_WORLD is
  split into three subcommunicators by rank modulo 3; the common trick throughout
  is VecCreateMPIWithArray(), which wraps a subcomm vector's local storage in a
  world-level vector so a single collective VecScatter can move the data.
  Requires at least 2 MPI ranks.
*/
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    nproc,grank,mycolor;       /* world size, world rank, subcomm color (= grank % 3) */
  PetscInt       i,n,N = 20,low,high;       /* N: global vector size for the world2sub case */
  MPI_Comm       subcomm;                   /* this rank's subcommunicator from MPI_Comm_split */
  Vec            x,yg; /* global vectors on PETSC_COMM_WORLD */
  VecScatter     vscat;
  IS             ix,iy;
  PetscBool      world2sub  = PETSC_FALSE;  /* Copy a vector from WORLD to a subcomm? */
  PetscBool      sub2sub    = PETSC_FALSE;  /* Copy a vector from a subcomm to another subcomm? */
  PetscBool      world2subs = PETSC_FALSE;  /* Copy a vector from WORLD to multiple subcomms? */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&nproc);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&grank);CHKERRQ(ierr);

  if (nproc < 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_SIZ,"This test must have at least two processes to run");

  ierr = PetscOptionsGetBool(NULL,0,"-world2sub",&world2sub,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,0,"-sub2sub",&sub2sub,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,0,"-world2subs",&world2subs,NULL);CHKERRQ(ierr);

  /* Split PETSC_COMM_WORLD into three subcomms. Each process can only see the subcomm it belongs to */
  mycolor = grank % 3;
  ierr    = MPI_Comm_split(PETSC_COMM_WORLD,mycolor,grank,&subcomm);CHKERRQ(ierr);

  /*===========================================================================
   *  Transfer a vector x defined on PETSC_COMM_WORLD to a vector y defined on
   *  a subcommunicator of PETSC_COMM_WORLD and vice versa.
   *===========================================================================*/
  if (world2sub) {
    ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)x,"x_commworld");CHKERRQ(ierr); /* Give a name to view x clearly */

    /* Initialize x to [-0.0, -1.0, -2.0, ..., -19.0] */
    ierr = VecGetOwnershipRange(x,&low,&high);CHKERRQ(ierr);
    for (i=low; i<high; i++) {
      PetscScalar val = -i;
      ierr = VecSetValue(x,i,val,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

    /* Transfer x to a vector y only defined on subcomm0 and vice versa */
    if (mycolor == 0) { /* subcomm0 contains ranks 0, 3, 6, ... in PETSC_COMM_WORLD */
      Vec         y;
      PetscScalar *yvalue;

      ierr = VecCreateMPI(subcomm,PETSC_DECIDE,N,&y);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject)y,"y_subcomm_0");CHKERRQ(ierr); /* Give a name to view y clearly */
      ierr = VecGetLocalSize(y,&n);CHKERRQ(ierr);
      ierr = VecGetArray(y,&yvalue);CHKERRQ(ierr);
      /* Create yg on PETSC_COMM_WORLD and alias yg with y. They share the memory pointed by yvalue.
        Note this is a collective call. All processes have to call it and supply consistent N.
      */
      ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,N,yvalue,&yg);CHKERRQ(ierr);

      /* Create an identity map that makes yg[i] = x[i], i=0..N-1 */
      ierr = VecGetOwnershipRange(yg,&low,&high);CHKERRQ(ierr); /* low, high are global indices */
      ierr = ISCreateStride(PETSC_COMM_SELF,high-low,low,1,&ix);CHKERRQ(ierr);
      ierr = ISDuplicate(ix,&iy);CHKERRQ(ierr);

      /* Union of ix's on subcomm0 covers the full range of [0,N) */
      ierr = VecScatterCreate(x,ix,yg,iy,&vscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* Once yg got the data from x, we return yvalue to y so that we can use y in other operations.
        VecGetArray must be paired with VecRestoreArray.
      */
      ierr = VecRestoreArray(y,&yvalue);CHKERRQ(ierr);

      /* Libraries on subcomm0 can safely use y now, for example, view and scale it */
      ierr = VecView(y,PETSC_VIEWER_STDOUT_(subcomm));CHKERRQ(ierr);
      ierr = VecScale(y,2.0);CHKERRQ(ierr);

      /* Send the new y back to x */
      ierr = VecGetArray(y,&yvalue);CHKERRQ(ierr); /* If VecScale is done on GPU, Petsc will prepare a valid yvalue for access */
      /* Supply new yvalue to yg without memory copying */
      ierr = VecPlaceArray(yg,yvalue);CHKERRQ(ierr);
      ierr = VecScatterBegin(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecScatterEnd(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecResetArray(yg);CHKERRQ(ierr); /* yg must drop the placed array before y restores/destroys it */
      ierr = VecRestoreArray(y,&yvalue);CHKERRQ(ierr);

      ierr = VecDestroy(&y);CHKERRQ(ierr);
    } else {
      /* Ranks outside of subcomm0 do not supply values to yg */
      ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,0/*n*/,N,NULL,&yg);CHKERRQ(ierr);

      /* Ranks in subcomm0 already specified the full range of the identity map. The remaining
        ranks just need to create empty ISes to cheat VecScatterCreate.
      */
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&ix);CHKERRQ(ierr);
      ierr = ISDuplicate(ix,&iy);CHKERRQ(ierr);

      ierr = VecScatterCreate(x,ix,yg,iy,&vscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* Send the new y back to x. Ranks outside of subcomm0 actually have nothing to send.
        But they have to call VecScatterBegin/End since these routines are collective.
      */
      ierr = VecScatterBegin(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecScatterEnd(vscat,yg,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    }

    ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISDestroy(&ix);CHKERRQ(ierr);
    ierr = ISDestroy(&iy);CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = VecDestroy(&yg);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&vscat);CHKERRQ(ierr);
  } /* world2sub */

  /*===========================================================================
   *  Transfer a vector x defined on subcomm0 to a vector y defined on
   *  subcomm1. The two subcomms are not overlapping and their union is
   *  not necessarily equal to PETSC_COMM_WORLD.
   *===========================================================================*/
  if (sub2sub) {
    if (mycolor == 0) {
      /* Intentionally declare N as a local variable so that processes in subcomm1 do not know its value */
      PetscInt          n,N = 22;
      Vec               x,xg,yg;
      IS                ix,iy;
      VecScatter        vscat;
      const PetscScalar *xvalue;
      MPI_Comm          intercomm,parentcomm;
      PetscMPIInt       lrank;                 /* rank within subcomm0 */

      ierr = MPI_Comm_rank(subcomm,&lrank);CHKERRQ(ierr);
      ierr = VecCreateMPI(subcomm,PETSC_DECIDE,N,&x);CHKERRQ(ierr); /* x is on subcomm */
      ierr = VecGetOwnershipRange(x,&low,&high);CHKERRQ(ierr);

      /* initialize x = [0.0, 1.0, 2.0, ..., 21.0] */
      for (i=low; i<high; i++) {
        PetscScalar val = i;
        ierr = VecSetValue(x,i,val,INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

      /* remote_leader=1 is subcomm1's leader (world rank 1); tag 100 must match subcomm1's call below */
      ierr = MPI_Intercomm_create(subcomm,0,PETSC_COMM_WORLD/*peer_comm*/,1,100/*tag*/,&intercomm);CHKERRQ(ierr);

      /* Tell rank 0 of subcomm1 the global size of x */
      if (!lrank) {ierr = MPI_Send(&N,1,MPIU_INT,0/*receiver's rank in remote comm, i.e., subcomm1*/,200/*tag*/,intercomm);CHKERRQ(ierr);}

      /* Create an intracomm Petsc can work on. Ranks in subcomm0 are ordered before ranks in subcomm1 in parentcomm.
        But this order actually does not matter, since what we care is vector y, which is defined on subcomm1.
      */
      ierr = MPI_Intercomm_merge(intercomm,0/*low*/,&parentcomm);CHKERRQ(ierr);

      /* Create a vector xg on parentcomm, which shares memory with x */
      ierr = VecGetArrayRead(x,&xvalue);CHKERRQ(ierr);
      ierr = VecGetLocalSize(x,&n);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(parentcomm,1,n,N,xvalue,&xg);CHKERRQ(ierr);

      /* Ranks in subcomm 0 have nothing on yg, so they simply have n=0, array=NULL */
      ierr = VecCreateMPIWithArray(parentcomm,1,0/*n*/,N,NULL/*array*/,&yg);CHKERRQ(ierr);

      /* Create the vecscatter, which does identity map by setting yg[i] = xg[i], i=0..N-1. */
      ierr = VecGetOwnershipRange(xg,&low,&high);CHKERRQ(ierr); /* low, high are global indices of xg */
      ierr = ISCreateStride(PETSC_COMM_SELF,high-low,low,1,&ix);CHKERRQ(ierr);
      ierr = ISDuplicate(ix,&iy);CHKERRQ(ierr);
      ierr = VecScatterCreate(xg,ix,yg,iy,&vscat);CHKERRQ(ierr);

      /* Scatter values from xg to yg */
      ierr = VecScatterBegin(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* After the VecScatter is done, xg is idle so we can safely return xvalue to x */
      ierr = VecRestoreArrayRead(x,&xvalue);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
      ierr = ISDestroy(&ix);CHKERRQ(ierr);
      ierr = ISDestroy(&iy);CHKERRQ(ierr);
      ierr = VecDestroy(&xg);CHKERRQ(ierr);
      ierr = VecDestroy(&yg);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vscat);CHKERRQ(ierr);
      ierr = MPI_Comm_free(&intercomm);CHKERRQ(ierr);
      ierr = MPI_Comm_free(&parentcomm);CHKERRQ(ierr);
    } else if (mycolor == 1) { /* subcomm 1, containing ranks 1, 4, 7, ... in PETSC_COMM_WORLD */
      PetscInt    n,N;          /* N is learned from subcomm0 via the intercomm below */
      Vec         y,xg,yg;
      IS          ix,iy;
      VecScatter  vscat;
      PetscScalar *yvalue;
      MPI_Comm    intercomm,parentcomm;
      PetscMPIInt lrank;        /* rank within subcomm1 */

      ierr = MPI_Comm_rank(subcomm,&lrank);CHKERRQ(ierr);
      ierr = MPI_Intercomm_create(subcomm,0,PETSC_COMM_WORLD/*peer_comm*/,0/*remote_leader*/,100/*tag*/,&intercomm);CHKERRQ(ierr);

      /* Two rank-0 are talking */
      if (!lrank) {ierr = MPI_Recv(&N,1,MPIU_INT,0/*sender's rank in remote comm, i.e. subcomm0*/,200/*tag*/,intercomm,MPI_STATUS_IGNORE);CHKERRQ(ierr);}
      /* Rank 0 of subcomm1 bcasts N to its members */
      ierr = MPI_Bcast(&N,1,MPIU_INT,0/*local root*/,subcomm);CHKERRQ(ierr);

      /* Create a intracomm Petsc can work on */
      ierr = MPI_Intercomm_merge(intercomm,1/*high*/,&parentcomm);CHKERRQ(ierr);

      /* Ranks in subcomm1 have nothing on xg, so they simply have n=0, array=NULL.*/
      ierr = VecCreateMPIWithArray(parentcomm,1/*bs*/,0/*n*/,N,NULL/*array*/,&xg);CHKERRQ(ierr);

      ierr = VecCreateMPI(subcomm,PETSC_DECIDE,N,&y);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject)y,"y_subcomm_1");CHKERRQ(ierr); /* Give a name to view y clearly */
      ierr = VecGetLocalSize(y,&n);CHKERRQ(ierr);
      ierr = VecGetArray(y,&yvalue);CHKERRQ(ierr);
      /* Create a vector yg on parentcomm, which shares memory with y. xg and yg must be
        created in the same order in subcomm0/1. For example, we can not reverse the order of
        creating xg and yg in subcomm1.
      */
      ierr = VecCreateMPIWithArray(parentcomm,1/*bs*/,n,N,yvalue,&yg);CHKERRQ(ierr);

      /* Ranks in subcomm0 already specified the full range of the identity map.
        ranks in subcomm1 just need to create empty ISes to cheat VecScatterCreate.
      */
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&ix);CHKERRQ(ierr);
      ierr = ISDuplicate(ix,&iy);CHKERRQ(ierr);
      ierr = VecScatterCreate(xg,ix,yg,iy,&vscat);CHKERRQ(ierr);

      /* Scatter values from xg to yg */
      ierr = VecScatterBegin(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vscat,xg,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* After the VecScatter is done, values in yg are available. y is our interest, so we return yvalue to y */
      ierr = VecRestoreArray(y,&yvalue);CHKERRQ(ierr);

      /* Libraries on subcomm1 can safely use y now, for example, view it */
      ierr = VecView(y,PETSC_VIEWER_STDOUT_(subcomm));CHKERRQ(ierr);

      ierr = VecDestroy(&y);CHKERRQ(ierr);
      ierr = ISDestroy(&ix);CHKERRQ(ierr);
      ierr = ISDestroy(&iy);CHKERRQ(ierr);
      ierr = VecDestroy(&xg);CHKERRQ(ierr);
      ierr = VecDestroy(&yg);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vscat);CHKERRQ(ierr);
      ierr = MPI_Comm_free(&intercomm);CHKERRQ(ierr);
      ierr = MPI_Comm_free(&parentcomm);CHKERRQ(ierr);
    } else if (mycolor == 2) { /* subcomm2 */
      /* Processes in subcomm2 do not participate in the VecScatter. They can freely do unrelated things on subcomm2 */
    }
  } /* sub2sub */

  /*===========================================================================
   *  Copy a vector x defined on PETSC_COMM_WORLD to vectors y defined on
   *  every subcommunicator of PETSC_COMM_WORLD. We could use multiple transfers
   *  as we did in case 1, but that is not efficient. Instead, we use one vecscatter
   *  to achieve that.
   *===========================================================================*/
  if (world2subs) {
    Vec         y,yg;
    PetscInt    n,N=15,xstart,ystart,low,high;
    PetscScalar *yvalue;

    /* Initialize x to [0, 1, 2, 3, ..., N-1] */
    ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(x,&low,&high);CHKERRQ(ierr);
    for (i=low; i<high; i++) {ierr = VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);CHKERRQ(ierr);}
    ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

    /* Every subcomm has a y as long as x */
    ierr = VecCreateMPI(subcomm,PETSC_DECIDE,N,&y);CHKERRQ(ierr);
    ierr = VecGetLocalSize(y,&n);CHKERRQ(ierr);

    /* Create a global vector yg on PETSC_COMM_WORLD using y's memory. yg's global size = N*(number of subcommunicators).
       Each rank in subcomms contributes a piece to construct the global yg. Keep in mind that pieces from a subcomm are not
       necessarily consecutive in yg. That depends on how PETSC_COMM_WORLD is split. In our case, subcomm0 is made of rank
       0, 3, 6 etc from PETSC_COMM_WORLD. So subcomm0's pieces are interleaved with pieces from other subcomms in yg.
    */
    ierr = VecGetArray(y,&yvalue);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,yvalue,&yg);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)yg,"yg_on_subcomms");CHKERRQ(ierr); /* Give a name to view yg clearly */

    /* The following two lines are key. From xstart, we know where to pull entries from x. Note that we get xstart from y,
       since first entry of y on this rank is from x[xstart]. From ystart, we know where to put entries to yg.
     */
    ierr = VecGetOwnershipRange(y,&xstart,NULL);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(yg,&ystart,NULL);CHKERRQ(ierr);

    ierr = ISCreateStride(PETSC_COMM_SELF,n,xstart,1,&ix);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,n,ystart,1,&iy);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,ix,yg,iy,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,x,yg,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* View yg on PETSC_COMM_WORLD before destroying it. We shall see the interleaving effect in output. */
    ierr = VecView(yg,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = VecDestroy(&yg);CHKERRQ(ierr);

    /* Restore yvalue so that processes in subcomm can use y from now on. */
    ierr = VecRestoreArray(y,&yvalue);CHKERRQ(ierr);
    ierr = VecScale(y,3.0);CHKERRQ(ierr);

    ierr = ISDestroy(&ix);CHKERRQ(ierr); /* One can also destroy ix, iy immediately after VecScatterCreate() */
    ierr = ISDestroy(&iy);CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = VecDestroy(&y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&vscat);CHKERRQ(ierr);
  } /* world2subs */

  ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
313 
314 /*TEST
315 
316    build:
317      requires: !define(PETSC_HAVE_MPIUNI)
318    testset:
319      nsize: 7
320 
321      test:
322        suffix: 1
323        args: -world2sub
324 
325      test:
326        suffix: 2
327        args: -sub2sub
328 
329      test:
330        suffix: 3
331        args: -world2subs
332 
333      test:
334        suffix: 4
335        args: -world2sub -sf_type neighbor
336        output_file: output/ex9_1.out
337        # OpenMPI has a bug wrt MPI_Neighbor_alltoallv etc (https://github.com/open-mpi/ompi/pull/6782). Once the patch is in, we can remove !define(PETSC_HAVE_OMPI_MAJOR_VERSION)
338        requires: define(PETSC_HAVE_MPI_NEIGHBORHOOD_COLLECTIVES) !define(PETSC_HAVE_OMPI_MAJOR_VERSION)
339 TEST*/
340 
341