xref: /petsc/src/dm/tests/ex4.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 
/* Help text shown for -help; registered with PetscInitialize() in main(). */
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";
3 
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 
/*
   Exercises 2-D DMDA creation, global<->local vector scatters, viewer output,
   and (with -testorder) the round trip between the PETSc (per-process) and
   application (natural) orderings via the DMDA's AO.
*/
int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscErrorCode   ierr;
  PetscInt         M = 10,N = 8,m = PETSC_DECIDE;      /* global grid size; processes in x */
  PetscInt         s =2,w=2,n = PETSC_DECIDE,nloc,l,i,j,kk; /* stencil width, dof/node, processes in y */
  PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal;          /* ghost-corner start/extent per direction */
  const PetscInt   *ltog;                              /* borrowed local->global index array */
  PetscInt         *lx       = NULL,*ly = NULL;        /* optional per-process node counts (-distribute) */
  PetscBool        testorder = PETSC_FALSE,flg;
  DMBoundaryType   bx        = DM_BOUNDARY_NONE,by= DM_BOUNDARY_NONE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMDAStencilType  st = DMDA_STENCIL_BOX;
  AO               ao;                                 /* borrowed from the DMDA; not destroyed here */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  CHKERRQ(PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer));

  /* Read options: global grid (-NX,-NY), process grid (-m,-n),
     stencil width (-s), and degrees of freedom per node (-w) */
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL));
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL));
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL));
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL));
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL));

  /* Boundary type and stencil shape; the last matching option wins */
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL)); if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL)); if (flg) by = DM_BOUNDARY_PERIODIC;
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL)); if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL)); if (flg) by = DM_BOUNDARY_GHOSTED;
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL)); if (flg) st = DMDA_STENCIL_STAR;
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL)); if (flg) st = DMDA_STENCIL_BOX;
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-testorder",&testorder,NULL));
  /*
      Test an explicit (non-uniform) distribution: four nodes in x and two
      nodes in y on each processor, except the last processor in each
      direction, which gets the remainder.
  */
  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL));
  if (flg) {
    PetscCheckFalse(m == PETSC_DECIDE,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
    CHKERRQ(PetscMalloc1(m,&lx));
    for (i=0; i<m-1; i++) { lx[i] = 4;}
    lx[m-1] = M - 4*(m-1);
    PetscCheckFalse(n == PETSC_DECIDE,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
    CHKERRQ(PetscMalloc1(n,&ly));
    for (i=0; i<n-1; i++) { ly[i] = 2;}
    ly[n-1] = N - 2*(n-1);
  }

  /* Create distributed array and get vectors; the DMDA copies lx/ly, so
     they can be freed immediately after setup */
  CHKERRQ(DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da));
  CHKERRQ(DMSetFromOptions(da));
  CHKERRQ(DMSetUp(da));
  CHKERRQ(PetscFree(lx));
  CHKERRQ(PetscFree(ly));

  CHKERRQ(DMView(da,viewer));
  CHKERRQ(DMCreateGlobalVector(da,&global));
  CHKERRQ(DMCreateLocalVector(da,&local));

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  CHKERRQ(VecSet(global,value));
  CHKERRQ(DMGlobalToLocalBegin(da,global,INSERT_VALUES,local));
  CHKERRQ(DMGlobalToLocalEnd(da,global,INSERT_VALUES,local));

  /* Scale local vectors according to processor rank; pass to global vector
     (INSERT_VALUES: owned entries overwrite, ghost contributions are dropped) */
  CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  value = rank;
  CHKERRQ(VecScale(local,value));
  CHKERRQ(DMLocalToGlobalBegin(da,local,INSERT_VALUES,global));
  CHKERRQ(DMLocalToGlobalEnd(da,local,INSERT_VALUES,global));

  if (!testorder) { /* turn off printing when testing ordering mappings */
    CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n"));
    CHKERRQ(VecView(global,PETSC_VIEWER_STDOUT_WORLD));
    CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\n\n"));
  }

  /* Send ghost points to local vectors */
  CHKERRQ(DMGlobalToLocalBegin(da,global,INSERT_VALUES,local));
  CHKERRQ(DMGlobalToLocalEnd(da,global,INSERT_VALUES,local));

  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL));
  if (flg) {
    PetscViewer sviewer;

    /* Rank-ordered output: synchronized prints plus a per-process sub-viewer
       so each rank's local vector appears as a contiguous section */
    CHKERRQ(PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD));
    CHKERRQ(PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank));
    CHKERRQ(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer));
    CHKERRQ(VecView(local,sviewer));
    CHKERRQ(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer));
    CHKERRQ(PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD));
    CHKERRQ(PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Tests mappings between application/PETSc orderings */
  if (testorder) {
    ISLocalToGlobalMapping ltogm;

    CHKERRQ(DMGetLocalToGlobalMapping(da,&ltogm));
    CHKERRQ(ISLocalToGlobalMappingGetSize(ltogm,&nloc));
    CHKERRQ(ISLocalToGlobalMappingGetIndices(ltogm,&ltog));
    CHKERRQ(DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL));
    CHKERRQ(DMDAGetAO(da,&ao));
    CHKERRQ(PetscMalloc1(nloc,&iglobal));

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        /* first dof of grid node (i,j) within this process's ghosted patch */
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          iglobal[kk++] = ltog[iloc+l];
        }
      }
    }

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y) */
    CHKERRQ(AOPetscToApplication(ao,nloc,iglobal));

    /* Then map the application ordering back to the PETSc DMDA ordering */
    CHKERRQ(AOApplicationToPetsc(ao,nloc,iglobal));

    /* Verify the mappings: the round trip must reproduce the original indices */
    kk=0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          if (iglobal[kk] != ltog[iloc+l]) {
            CHKERRQ(PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,j,i,l,ltog[iloc+l],iglobal[kk]));
          }
          kk++;
        }
      }
    }
    CHKERRQ(PetscFree(iglobal));
    CHKERRQ(ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog));
  }

  /* Free memory (ltogm and ao are owned by the DMDA and freed with it) */
  CHKERRQ(PetscViewerDestroy(&viewer));
  CHKERRQ(VecDestroy(&local));
  CHKERRQ(VecDestroy(&global));
  CHKERRQ(DMDestroy(&da));

  ierr = PetscFinalize();
  return ierr;
}
171 
172 /*TEST
173 
174    test:
175       nsize: 4
176       args: -nox
177       filter: grep -v -i Object
178       requires: x
179 
180    test:
181       suffix: 2
182       args: -testorder -nox
183       requires: x
184 
185 TEST*/
186