xref: /petsc/src/dm/tests/ex4.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
/* One-line description printed by -help. */
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";
3 
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 
/*
   Test driver: builds a 2d DMDA, moves data between the global vector and the
   local (ghosted) vectors, optionally prints both, and optionally verifies
   that the PETSc <-> application (natural) ordering mappings provided by the
   DMDA's AO are mutual inverses.
*/
int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscInt         M = 10,N = 8,m = PETSC_DECIDE;            /* M,N = global grid size; m = processes in x */
  PetscInt         s =2,w=2,n = PETSC_DECIDE,nloc,l,i,j,kk;  /* s = stencil width, w = dof per grid point, n = processes in y */
  PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal;                /* ghost-region corners/sizes and scratch index array */
  const PetscInt   *ltog;                                    /* borrowed local-to-global index array; restored before exit */
  PetscInt         *lx       = NULL,*ly = NULL;              /* optional per-process ownership counts (set by -distribute) */
  PetscBool        testorder = PETSC_FALSE,flg;
  DMBoundaryType   bx        = DM_BOUNDARY_NONE,by= DM_BOUNDARY_NONE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMDAStencilType  st = DMDA_STENCIL_BOX;
  AO               ao;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer));

  /* Read options */
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL));

  /* Boundary types and stencil shape; each flag, if given, overrides the defaults above */
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL)); if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL)); if (flg) by = DM_BOUNDARY_PERIODIC;
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL)); if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL)); if (flg) by = DM_BOUNDARY_GHOSTED;
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL)); if (flg) st = DMDA_STENCIL_STAR;
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL)); if (flg) st = DMDA_STENCIL_BOX;
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-testorder",&testorder,NULL));
  /*
      Test an explicit ownership distribution: four grid points per process in
      x and two per process in y, except that the last process in each
      direction gets whatever remains.
  */
  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL));
  if (flg) {
    PetscCheck(m != PETSC_DECIDE,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
    PetscCall(PetscMalloc1(m,&lx));
    for (i=0; i<m-1; i++) { lx[i] = 4;}
    lx[m-1] = M - 4*(m-1);
    PetscCheck(n != PETSC_DECIDE,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
    PetscCall(PetscMalloc1(n,&ly));
    for (i=0; i<n-1; i++) { ly[i] = 2;}
    ly[n-1] = N - 2*(n-1);
  }

  /* Create distributed array and get vectors */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  /* lx/ly were copied by the DMDA; PetscFree(NULL) is a no-op when -distribute was not given */
  PetscCall(PetscFree(lx));
  PetscCall(PetscFree(ly));

  PetscCall(DMView(da,viewer));
  PetscCall(DMCreateGlobalVector(da,&global));
  PetscCall(DMCreateLocalVector(da,&local));

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  PetscCall(VecSet(global,value));
  PetscCall(DMGlobalToLocalBegin(da,global,INSERT_VALUES,local));
  PetscCall(DMGlobalToLocalEnd(da,global,INSERT_VALUES,local));

  /* Scale local vectors according to processor rank; pass to global vector */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  value = rank;
  PetscCall(VecScale(local,value));
  PetscCall(DMLocalToGlobalBegin(da,local,INSERT_VALUES,global));
  PetscCall(DMLocalToGlobalEnd(da,local,INSERT_VALUES,global));

  if (!testorder) { /* turn off printing when testing ordering mappings */
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n"));
    PetscCall(VecView(global,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\n\n"));
  }

  /* Send ghost points to local vectors */
  PetscCall(DMGlobalToLocalBegin(da,global,INSERT_VALUES,local));
  PetscCall(DMGlobalToLocalEnd(da,global,INSERT_VALUES,local));

  flg  = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL));
  if (flg) {
    PetscViewer sviewer;

    /* Print each rank's local (ghosted) vector in rank order via a synchronized sub-viewer */
    PetscCall(PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank));
    PetscCall(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer));
    PetscCall(VecView(local,sviewer));
    PetscCall(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer));
    PetscCall(PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Tests mappings between application/PETSc orderings */
  if (testorder) {
    ISLocalToGlobalMapping ltogm;

    PetscCall(DMGetLocalToGlobalMapping(da,&ltogm));
    PetscCall(ISLocalToGlobalMappingGetSize(ltogm,&nloc));
    PetscCall(ISLocalToGlobalMappingGetIndices(ltogm,&ltog));
    PetscCall(DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL));
    /* NOTE: the AO is owned by the DM and must not be destroyed here */
    PetscCall(DMDAGetAO(da,&ao));
    PetscCall(PetscMalloc1(nloc,&iglobal));

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);  /* w interlaced dof per grid point */
        for (l=0; l<w; l++) {
          iglobal[kk++] = ltog[iloc+l];
        }
      }
    }

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y) */
    PetscCall(AOPetscToApplication(ao,nloc,iglobal));

    /* Then map the application ordering back to the PETSc DMDA ordering */
    PetscCall(AOApplicationToPetsc(ao,nloc,iglobal));

    /* Verify the mappings: the round trip must reproduce the original indices */
    kk=0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          if (iglobal[kk] != ltog[iloc+l]) {
            PetscCall(PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%" PetscInt_FMT ", i=%" PetscInt_FMT ", l=%" PetscInt_FMT ", petsc1=%" PetscInt_FMT ", petsc2=%" PetscInt_FMT "\n",rank,j,i,l,ltog[iloc+l],iglobal[kk]));
          }
          kk++;
        }
      }
    }
    PetscCall(PetscFree(iglobal));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog));
  }

  /* Free memory */
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
171 
172 /*TEST
173 
174    test:
175       nsize: 4
176       args: -nox
177       filter: grep -v -i Object
178       requires: x
179 
180    test:
181       suffix: 2
182       args: -testorder -nox
183       requires: x
184 
185 TEST*/
186