xref: /petsc/src/dm/tests/ex4.c (revision 2f613bf53f46f9356e00a2ca2bd69453be72fc31)
1 
/* Usage text printed by the -help option; summarizes what this test exercises. */
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";
3 
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 
7 int main(int argc,char **argv)
8 {
9   PetscMPIInt      rank;
10   PetscErrorCode   ierr;
11   PetscInt         M = 10,N = 8,m = PETSC_DECIDE;
12   PetscInt         s =2,w=2,n = PETSC_DECIDE,nloc,l,i,j,kk;
13   PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal;
14   const PetscInt   *ltog;
15   PetscInt         *lx       = NULL,*ly = NULL;
16   PetscBool        testorder = PETSC_FALSE,flg;
17   DMBoundaryType   bx        = DM_BOUNDARY_NONE,by= DM_BOUNDARY_NONE;
18   DM               da;
19   PetscViewer      viewer;
20   Vec              local,global;
21   PetscScalar      value;
22   DMDAStencilType  st = DMDA_STENCIL_BOX;
23   AO               ao;
24 
25   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
26   ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer);CHKERRQ(ierr);
27 
28   /* Readoptions */
29   ierr = PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);CHKERRQ(ierr);
30   ierr = PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);CHKERRQ(ierr);
31   ierr = PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);CHKERRQ(ierr);
32   ierr = PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);CHKERRQ(ierr);
33   ierr = PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);CHKERRQ(ierr);
34   ierr = PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);CHKERRQ(ierr);
35 
36   flg  = PETSC_FALSE;
37   ierr = PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL);CHKERRQ(ierr); if (flg) bx = DM_BOUNDARY_PERIODIC;
38   flg  = PETSC_FALSE;
39   ierr = PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL);CHKERRQ(ierr); if (flg) by = DM_BOUNDARY_PERIODIC;
40   flg  = PETSC_FALSE;
41   ierr = PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL);CHKERRQ(ierr); if (flg) bx = DM_BOUNDARY_GHOSTED;
42   flg  = PETSC_FALSE;
43   ierr = PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL);CHKERRQ(ierr); if (flg) by = DM_BOUNDARY_GHOSTED;
44   flg  = PETSC_FALSE;
45   ierr = PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL);CHKERRQ(ierr); if (flg) st = DMDA_STENCIL_STAR;
46   flg  = PETSC_FALSE;
47   ierr = PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL);CHKERRQ(ierr); if (flg) st = DMDA_STENCIL_BOX;
48   flg  = PETSC_FALSE;
49   ierr = PetscOptionsGetBool(NULL,NULL,"-testorder",&testorder,NULL);CHKERRQ(ierr);
50   /*
51       Test putting two nodes in x and y on each processor, exact last processor
52       in x and y gets the rest.
53   */
54   flg  = PETSC_FALSE;
55   ierr = PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);CHKERRQ(ierr);
56   if (flg) {
57     if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
58     ierr = PetscMalloc1(m,&lx);CHKERRQ(ierr);
59     for (i=0; i<m-1; i++) { lx[i] = 4;}
60     lx[m-1] = M - 4*(m-1);
61     if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
62     ierr = PetscMalloc1(n,&ly);CHKERRQ(ierr);
63     for (i=0; i<n-1; i++) { ly[i] = 2;}
64     ly[n-1] = N - 2*(n-1);
65   }
66 
67   /* Create distributed array and get vectors */
68   ierr = DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da);CHKERRQ(ierr);
69   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
70   ierr = DMSetUp(da);CHKERRQ(ierr);
71   ierr = PetscFree(lx);CHKERRQ(ierr);
72   ierr = PetscFree(ly);CHKERRQ(ierr);
73 
74   ierr = DMView(da,viewer);CHKERRQ(ierr);
75   ierr = DMCreateGlobalVector(da,&global);CHKERRQ(ierr);
76   ierr = DMCreateLocalVector(da,&local);CHKERRQ(ierr);
77 
78   /* Set global vector; send ghost points to local vectors */
79   value = 1;
80   ierr = VecSet(global,value);CHKERRQ(ierr);
81   ierr = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
82   ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
83 
84   /* Scale local vectors according to processor rank; pass to global vector */
85   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
86   value = rank;
87   ierr = VecScale(local,value);CHKERRQ(ierr);
88   ierr = DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
89   ierr = DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
90 
91   if (!testorder) { /* turn off printing when testing ordering mappings */
92     ierr = PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n");CHKERRQ(ierr);
93     ierr = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
94     ierr = PetscPrintf(PETSC_COMM_WORLD,"\n\n");CHKERRQ(ierr);
95   }
96 
97   /* Send ghost points to local vectors */
98   ierr = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
99   ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
100 
101   flg  = PETSC_FALSE;
102   ierr = PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);CHKERRQ(ierr);
103   if (flg) {
104     PetscViewer sviewer;
105 
106     ierr = PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
107     ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);CHKERRQ(ierr);
108     ierr = PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
109     ierr = VecView(local,sviewer);CHKERRQ(ierr);
110     ierr = PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
111     ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
112     ierr = PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
113   }
114 
115   /* Tests mappings between application/PETSc orderings */
116   if (testorder) {
117     ISLocalToGlobalMapping ltogm;
118 
119     ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingGetSize(ltogm,&nloc);CHKERRQ(ierr);
121     ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
122     ierr = DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL);CHKERRQ(ierr);
123     ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
124     ierr = PetscMalloc1(nloc,&iglobal);CHKERRQ(ierr);
125 
126     /* Set iglobal to be global indices for each processor's local and ghost nodes,
127        using the DMDA ordering of grid points */
128     kk = 0;
129     for (j=Ys; j<Ys+Ym; j++) {
130       for (i=Xs; i<Xs+Xm; i++) {
131         iloc = w*((j-Ys)*Xm + i-Xs);
132         for (l=0; l<w; l++) {
133           iglobal[kk++] = ltog[iloc+l];
134         }
135       }
136     }
137 
138     /* Map this to the application ordering (which for DMDAs is just the natural ordering
139        that would be used for 1 processor, numbering most rapidly by x, then y) */
140     ierr = AOPetscToApplication(ao,nloc,iglobal);CHKERRQ(ierr);
141 
142     /* Then map the application ordering back to the PETSc DMDA ordering */
143     ierr = AOApplicationToPetsc(ao,nloc,iglobal);CHKERRQ(ierr);
144 
145     /* Verify the mappings */
146     kk=0;
147     for (j=Ys; j<Ys+Ym; j++) {
148       for (i=Xs; i<Xs+Xm; i++) {
149         iloc = w*((j-Ys)*Xm + i-Xs);
150         for (l=0; l<w; l++) {
151           if (iglobal[kk] != ltog[iloc+l]) {
152             ierr = PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,j,i,l,ltog[iloc+l],iglobal[kk]);CHKERRQ(ierr);
153           }
154           kk++;
155         }
156       }
157     }
158     ierr = PetscFree(iglobal);CHKERRQ(ierr);
159     ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
160   }
161 
162   /* Free memory */
163   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
164   ierr = VecDestroy(&local);CHKERRQ(ierr);
165   ierr = VecDestroy(&global);CHKERRQ(ierr);
166   ierr = DMDestroy(&da);CHKERRQ(ierr);
167 
168   ierr = PetscFinalize();
169   return ierr;
170 }
171 
172 /*TEST
173 
174    test:
175       nsize: 4
176       args: -nox
177       filter: grep -v -i Object
178       requires: x
179 
180    test:
181       suffix: 2
182       args: -testorder -nox
183       requires: x
184 
185 TEST*/
186