xref: /petsc/src/dm/tests/ex6.c (revision 2fa40bb9206b96114faa7cb222621ec184d31cd2)
/* Help text printed by PETSc when the program is run with -help. */
static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";
2 
3 #include <petscdm.h>
4 #include <petscdmda.h>
5 #include <petscao.h>
6 
7 int main(int argc,char **argv)
8 {
9   PetscMPIInt      rank;
10   PetscInt         M = 3,N = 5,P=3,s=1,w=2,nloc,l,i,j,k,kk,m = PETSC_DECIDE,n = PETSC_DECIDE,p = PETSC_DECIDE;
11   PetscErrorCode   ierr;
12   PetscInt         Xs,Xm,Ys,Ym,Zs,Zm,iloc,*iglobal;
13   const PetscInt   *ltog;
14   PetscInt         *lx        = NULL,*ly = NULL,*lz = NULL;
15   PetscBool        test_order = PETSC_FALSE;
16   DM               da;
17   PetscViewer      viewer;
18   Vec              local,global;
19   PetscScalar      value;
20   DMBoundaryType   bx           = DM_BOUNDARY_NONE,by = DM_BOUNDARY_NONE,bz = DM_BOUNDARY_NONE;
21   DMDAStencilType  stencil_type = DMDA_STENCIL_BOX;
22   AO               ao;
23   PetscBool        flg = PETSC_FALSE;
24 
25   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
26   ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,300,&viewer);CHKERRQ(ierr);
27 
28   /* Read options */
29   ierr = PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);CHKERRQ(ierr);
30   ierr = PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);CHKERRQ(ierr);
31   ierr = PetscOptionsGetInt(NULL,NULL,"-NZ",&P,NULL);CHKERRQ(ierr);
32   ierr = PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);CHKERRQ(ierr);
33   ierr = PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);CHKERRQ(ierr);
34   ierr = PetscOptionsGetInt(NULL,NULL,"-p",&p,NULL);CHKERRQ(ierr);
35   ierr = PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);CHKERRQ(ierr);
36   ierr = PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);CHKERRQ(ierr);
37   flg  = PETSC_FALSE;
38   ierr = PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL);CHKERRQ(ierr);
39   if (flg) stencil_type =  DMDA_STENCIL_STAR;
40   flg  = PETSC_FALSE;
41   ierr = PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL);CHKERRQ(ierr);
42   if (flg) stencil_type =  DMDA_STENCIL_BOX;
43 
44   flg  = PETSC_FALSE;
45   ierr = PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL);CHKERRQ(ierr);
46   if (flg) bx = DM_BOUNDARY_PERIODIC;
47   flg  = PETSC_FALSE;
48   ierr = PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL);CHKERRQ(ierr);
49   if (flg) bx = DM_BOUNDARY_GHOSTED;
50   flg  = PETSC_FALSE;
51   ierr = PetscOptionsGetBool(NULL,NULL,"-xnonghosted",&flg,NULL);CHKERRQ(ierr);
52 
53   flg  = PETSC_FALSE;
54   ierr = PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL);CHKERRQ(ierr);
55   if (flg) by = DM_BOUNDARY_PERIODIC;
56   flg  = PETSC_FALSE;
57   ierr = PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL);CHKERRQ(ierr);
58   if (flg) by = DM_BOUNDARY_GHOSTED;
59   flg  = PETSC_FALSE;
60   ierr = PetscOptionsGetBool(NULL,NULL,"-ynonghosted",&flg,NULL);CHKERRQ(ierr);
61 
62   flg  = PETSC_FALSE;
63   ierr = PetscOptionsGetBool(NULL,NULL,"-zperiodic",&flg,NULL);CHKERRQ(ierr);
64   if (flg) bz = DM_BOUNDARY_PERIODIC;
65   flg  = PETSC_FALSE;
66   ierr = PetscOptionsGetBool(NULL,NULL,"-zghosted",&flg,NULL);CHKERRQ(ierr);
67   if (flg) bz = DM_BOUNDARY_GHOSTED;
68   flg  = PETSC_FALSE;
69   ierr = PetscOptionsGetBool(NULL,NULL,"-znonghosted",&flg,NULL);CHKERRQ(ierr);
70 
71   ierr = PetscOptionsGetBool(NULL,NULL,"-testorder",&test_order,NULL);CHKERRQ(ierr);
72 
73   flg  = PETSC_FALSE;
74   ierr = PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);CHKERRQ(ierr);
75   if (flg) {
76     if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
77     ierr = PetscMalloc1(m,&lx);CHKERRQ(ierr);
78     for (i=0; i<m-1; i++) lx[i] = 4;
79     lx[m-1] = M - 4*(m-1);
80     if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
81     ierr = PetscMalloc1(n,&ly);CHKERRQ(ierr);
82     for (i=0; i<n-1; i++) ly[i] = 2;
83     ly[n-1] = N - 2*(n-1);
84     if (p == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -p option with -distribute option");
85     ierr = PetscMalloc1(p,&lz);CHKERRQ(ierr);
86     for (i=0; i<p-1; i++) lz[i] = 2;
87     lz[p-1] = P - 2*(p-1);
88   }
89 
90   /* Create distributed array and get vectors */
91   ierr = DMDACreate3d(PETSC_COMM_WORLD,bx,by,bz,stencil_type,M,N,P,m,n,p,w,s,lx,ly,lz,&da);CHKERRQ(ierr);
92   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
93   ierr = DMSetUp(da);CHKERRQ(ierr);
94   ierr = PetscFree(lx);CHKERRQ(ierr);
95   ierr = PetscFree(ly);CHKERRQ(ierr);
96   ierr = PetscFree(lz);CHKERRQ(ierr);
97   ierr = DMView(da,viewer);CHKERRQ(ierr);
98   ierr = DMCreateGlobalVector(da,&global);CHKERRQ(ierr);
99   ierr = DMCreateLocalVector(da,&local);CHKERRQ(ierr);
100 
101   /* Set global vector; send ghost points to local vectors */
102   value = 1;
103   ierr = VecSet(global,value);CHKERRQ(ierr);
104   ierr = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
105   ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
106 
107   /* Scale local vectors according to processor rank; pass to global vector */
108   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
109   value = rank;
110   ierr = VecScale(local,value);CHKERRQ(ierr);
111   ierr = DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
112   ierr = DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
113 
114   if (!test_order) { /* turn off printing when testing ordering mappings */
115     if (M*N*P<40) {
116       ierr = PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");CHKERRQ(ierr);
117       ierr = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
118       ierr = PetscPrintf(PETSC_COMM_WORLD,"\n");CHKERRQ(ierr);
119     }
120   }
121 
122   /* Send ghost points to local vectors */
123   ierr = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
124   ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
125 
126   flg  = PETSC_FALSE;
127   ierr = PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);CHKERRQ(ierr);
128   if (flg) {
129     PetscViewer sviewer;
130     ierr = PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
131     ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);CHKERRQ(ierr);
132     ierr = PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
133     ierr = VecView(local,sviewer);CHKERRQ(ierr);
134     ierr = PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
135     ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
136     ierr = PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
137   }
138 
139   /* Tests mappings between application/PETSc orderings */
140   if (test_order) {
141     ISLocalToGlobalMapping ltogm;
142 
143     ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
144     ierr = ISLocalToGlobalMappingGetSize(ltogm,&nloc);CHKERRQ(ierr);
145     ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
146 
147     ierr = DMDAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);CHKERRQ(ierr);
148     ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
149     /* ierr = AOView(ao,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
150     ierr = PetscMalloc1(nloc,&iglobal);CHKERRQ(ierr);
151 
152     /* Set iglobal to be global indices for each processor's local and ghost nodes,
153        using the DMDA ordering of grid points */
154     kk = 0;
155     for (k=Zs; k<Zs+Zm; k++) {
156       for (j=Ys; j<Ys+Ym; j++) {
157         for (i=Xs; i<Xs+Xm; i++) {
158           iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
159           for (l=0; l<w; l++) {
160             iglobal[kk++] = ltog[iloc+l];
161           }
162         }
163       }
164     }
165 
166     /* Map this to the application ordering (which for DMDAs is just the natural ordering
167        that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
168     ierr = AOPetscToApplication(ao,nloc,iglobal);CHKERRQ(ierr);
169 
170     /* Then map the application ordering back to the PETSc DMDA ordering */
171     ierr = AOApplicationToPetsc(ao,nloc,iglobal);CHKERRQ(ierr);
172 
173     /* Verify the mappings */
174     kk=0;
175     for (k=Zs; k<Zs+Zm; k++) {
176       for (j=Ys; j<Ys+Ym; j++) {
177         for (i=Xs; i<Xs+Xm; i++) {
178           iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
179           for (l=0; l<w; l++) {
180             if (iglobal[kk] != ltog[iloc+l]) {
181               ierr = PetscPrintf(MPI_COMM_WORLD,"[%D] Problem with mapping: z=%D, j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,k,j,i,l,ltog[iloc+l],iglobal[kk]);CHKERRQ(ierr);
182             }
183             kk++;
184           }
185         }
186       }
187     }
188     ierr = PetscFree(iglobal);CHKERRQ(ierr);
189     ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
190   }
191 
192   /* Free memory */
193   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
194   ierr = VecDestroy(&local);CHKERRQ(ierr);
195   ierr = VecDestroy(&global);CHKERRQ(ierr);
196   ierr = DMDestroy(&da);CHKERRQ(ierr);
197   ierr = PetscFinalize();
198   return ierr;
199 }
200 
201 /*TEST
202 
203     test:
204       args:  -testorder -nox
205 
206  TEST*/
207 
208