xref: /petsc/src/vec/is/sf/tests/ex17.c (revision 76be6f4ff3bd4e251c19fc00ebbebfd58b6e7589)
static const char help[] = "Test PetscSF with MPI large count (more than 2 billion elements in messages)\n\n";

#include <petscsys.h>
#include <petscsf.h>

int main(int argc,char **argv)
{
  PetscSF           sf;
  PetscInt          i,nroots,nleaves;
  PetscInt          n = (1ULL<<31) + 1024; /* a little over 2G elements */
  PetscSFNode       *iremote = NULL;
  PetscMPIInt       rank,size;
  char              *rootdata=NULL,*leafdata=NULL;
  Vec               x,y;
  VecScatter        vscat;
  PetscInt          rstart,rend;
  IS                ix;
  const PetscScalar *xv;

  PetscCall(PetscInitialize(&argc,&argv,NULL,help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCheck(size == 2,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"The test can only run with two MPI ranks");

  /* Test PetscSF */
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD,&sf));
  PetscCall(PetscSFSetFromOptions(sf));

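  /* Graph: rank 0 owns all n roots and has no leaves; rank 1 has n leaves, with leaf i referencing root i on rank 0 */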
  if (rank == 0) {
    nroots  = n;
    nleaves = 0;
  } else {
    nroots  = 0;
    nleaves = n;
    PetscCall(PetscMalloc1(nleaves,&iremote));
    for (i=0; i<nleaves; i++) {
      iremote[i].rank  = 0;
      iremote[i].index = i;
    }
  }
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES));
  PetscCall(PetscMalloc2(nroots,&rootdata,nleaves,&leafdata));
  if (rank == 0) {
    memset(rootdata,11,nroots);
    rootdata[nroots-1] = 12; /* Use a different value at the end */
  }

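  /* With 1-byte MPI_SIGNED_CHAR data each buffer is only about 2 GiB, yet the element count n exceeds the 32-bit int range, so these calls exercise MPI large-count support */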
  PetscCall(PetscSFBcastBegin(sf,MPI_SIGNED_CHAR,rootdata,leafdata,MPI_REPLACE)); /* rank 0->1, bcast rootdata to leafdata */
  PetscCall(PetscSFBcastEnd(sf,MPI_SIGNED_CHAR,rootdata,leafdata,MPI_REPLACE));
  PetscCall(PetscSFReduceBegin(sf,MPI_SIGNED_CHAR,leafdata,rootdata,MPI_SUM)); /* rank 1->0, add leafdata to rootdata */
  PetscCall(PetscSFReduceEnd(sf,MPI_SIGNED_CHAR,leafdata,rootdata,MPI_SUM));
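  /* After bcast + reduce, rank 0 expects 11+11 = 22 in every entry and 12+12 = 24 in the last one */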
  PetscCheck(rank != 0 || (rootdata[0] == 22 && rootdata[nroots-1] == 24),PETSC_COMM_SELF,PETSC_ERR_PLIB,"SF: wrong results");

  PetscCall(PetscFree2(rootdata,leafdata));
  PetscCall(PetscFree(iremote));
  PetscCall(PetscSFDestroy(&sf));

  /* Test VecScatter */
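  /* x has n local entries on rank 0 and 64 on rank 1; y is the reverse, so the identity scatter below moves more than 2 billion PetscScalars from rank 0 to rank 1 in the forward direction */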
  PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
  PetscCall(VecCreate(PETSC_COMM_WORLD,&y));
  PetscCall(VecSetSizes(x,rank==0? n : 64,PETSC_DECIDE));
  PetscCall(VecSetSizes(y,rank==0? 64 : n,PETSC_DECIDE));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetFromOptions(y));

  PetscCall(VecGetOwnershipRange(x,&rstart,&rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&ix));
  PetscCall(VecScatterCreate(x,ix,y,ix,&vscat));

  PetscCall(VecSet(x,3.0));
  PetscCall(VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));

  PetscCall(VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));

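  /* The forward scatter copied 3.0 into y and the reverse scatter added it back, so every entry of x should now be 6.0 */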
  PetscCall(VecGetArrayRead(x,&xv));
  PetscCheck(xv[0] == 6.0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"VecScatter: wrong results");
  PetscCall(VecRestoreArrayRead(x,&xv));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(VecScatterDestroy(&vscat));
  PetscCall(ISDestroy(&ix));

  PetscCall(PetscFinalize());
  return 0;
}

/**TEST
   test:
     requires: defined(PETSC_HAVE_MPI_LARGE_COUNT) defined(PETSC_USE_64BIT_INDICES)
     TODO: need a machine with big memory (~150GB) to run the test
     nsize: 2
     args: -sf_type {{basic neighbor}}

TEST**/