xref: /petsc/src/dm/tests/ex4.c (revision c4762a1b19cd2af06abeed90e8f9d34fb975dd94)
1 
2 static char help[] = "Tests various 2-dimensional DMDA routines.\n\n"; /* printed by -help */
3 
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 
7 int main(int argc,char **argv)
8 {
9   PetscMPIInt      rank;
10   PetscErrorCode   ierr;
11   PetscInt         M = 10,N = 8,m = PETSC_DECIDE;
12   PetscInt         s =2,w=2,n = PETSC_DECIDE,nloc,l,i,j,kk; /* s = stencil width, w = degrees of freedom per grid point */
13   PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal;
14   const PetscInt   *ltog;                                   /* local-to-global map indices (borrowed; must be restored) */
15   PetscInt         *lx       = NULL,*ly = NULL;             /* optional per-process ownership counts for -distribute */
16   PetscBool        testorder = PETSC_FALSE,flg;
17   DMBoundaryType   bx        = DM_BOUNDARY_NONE,by= DM_BOUNDARY_NONE;
18   DM               da;
19   PetscViewer      viewer;
20   Vec              local,global;
21   PetscScalar      value;
22   DMDAStencilType  st = DMDA_STENCIL_BOX;
23   AO               ao;
24 
25   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
26   ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer);CHKERRQ(ierr);
27 
28   /* Read runtime options controlling grid size, process layout, dof count and stencil */
29   ierr = PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);CHKERRQ(ierr);
30   ierr = PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);CHKERRQ(ierr);
31   ierr = PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);CHKERRQ(ierr);
32   ierr = PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);CHKERRQ(ierr);
33   ierr = PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);CHKERRQ(ierr);
34   ierr = PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);CHKERRQ(ierr);
35 
36   flg  = PETSC_FALSE;
37   ierr = PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL);CHKERRQ(ierr); if (flg) bx = DM_BOUNDARY_PERIODIC;
38   flg  = PETSC_FALSE;
39   ierr = PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL);CHKERRQ(ierr); if (flg) by = DM_BOUNDARY_PERIODIC;
40   flg  = PETSC_FALSE;
41   ierr = PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL);CHKERRQ(ierr); if (flg) bx = DM_BOUNDARY_GHOSTED;
42   flg  = PETSC_FALSE;
43   ierr = PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL);CHKERRQ(ierr); if (flg) by = DM_BOUNDARY_GHOSTED;
44   flg  = PETSC_FALSE;
45   ierr = PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL);CHKERRQ(ierr); if (flg) st = DMDA_STENCIL_STAR;
46   flg  = PETSC_FALSE;
47   ierr = PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL);CHKERRQ(ierr); if (flg) st = DMDA_STENCIL_BOX;
48   flg  = PETSC_FALSE;
49   ierr = PetscOptionsGetBool(NULL,NULL,"-testorder",&testorder,NULL);CHKERRQ(ierr);
50   /*
51       Test putting four nodes in x and two nodes in y on each processor, except the
52       last processor in each direction, which gets the remainder.
53   */
54   flg  = PETSC_FALSE;
55   ierr = PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);CHKERRQ(ierr);
56   if (flg) {
57     if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
58     ierr = PetscMalloc1(m,&lx);CHKERRQ(ierr);
59     for (i=0; i<m-1; i++) { lx[i] = 4;}
60     lx[m-1] = M - 4*(m-1);
61     if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
62     ierr = PetscMalloc1(n,&ly);CHKERRQ(ierr);
63     for (i=0; i<n-1; i++) { ly[i] = 2;}
64     ly[n-1] = N - 2*(n-1);
65   }
66 
67 
68   /* Create distributed array and get vectors */
69   ierr = DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da);CHKERRQ(ierr);
70   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
71   ierr = DMSetUp(da);CHKERRQ(ierr);
72   ierr = PetscFree(lx);CHKERRQ(ierr);   /* DMDACreate2d copies lx/ly, so they can be freed now; PetscFree(NULL) is a no-op */
73   ierr = PetscFree(ly);CHKERRQ(ierr);
74 
75   ierr = DMView(da,viewer);CHKERRQ(ierr);
76   ierr = DMCreateGlobalVector(da,&global);CHKERRQ(ierr);
77   ierr = DMCreateLocalVector(da,&local);CHKERRQ(ierr);
78 
79   /* Set global vector; send ghost points to local vectors */
80   value = 1;
81   ierr = VecSet(global,value);CHKERRQ(ierr);
82   ierr = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
83   ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
84 
85   /* Scale local vectors according to processor rank; pass to global vector */
86   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
87   value = rank;
88   ierr = VecScale(local,value);CHKERRQ(ierr);
89   ierr = DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
90   ierr = DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
91 
92   if (!testorder) { /* turn off printing when testing ordering mappings */
93     ierr = PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n");CHKERRQ(ierr);
94     ierr = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
95     ierr = PetscPrintf(PETSC_COMM_WORLD,"\n\n");CHKERRQ(ierr);
96   }
97 
98   /* Send ghost points to local vectors */
99   ierr = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
100   ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
101 
102   flg  = PETSC_FALSE;
103   ierr = PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);CHKERRQ(ierr);
104   if (flg) {
105     PetscViewer sviewer;
106 
107     /* Print each process's local (ghosted) vector in rank order via a sub-viewer */
108     ierr = PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
109     ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);CHKERRQ(ierr);
110     ierr = PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
111     ierr = VecView(local,sviewer);CHKERRQ(ierr);
112     ierr = PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
113     ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
114     ierr = PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
115   }
116 
117   /* Tests mappings between application/PETSc orderings */
118   if (testorder) {
119     ISLocalToGlobalMapping ltogm;
120 
121     ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
122     ierr = ISLocalToGlobalMappingGetSize(ltogm,&nloc);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
124     ierr = DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL);CHKERRQ(ierr);
125     ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
126     ierr = PetscMalloc1(nloc,&iglobal);CHKERRQ(ierr);
127 
128     /* Set iglobal to be global indices for each processor's local and ghost nodes,
129        using the DMDA ordering of grid points */
130     kk = 0;
131     for (j=Ys; j<Ys+Ym; j++) {
132       for (i=Xs; i<Xs+Xm; i++) {
133         iloc = w*((j-Ys)*Xm + i-Xs);
134         for (l=0; l<w; l++) {
135           iglobal[kk++] = ltog[iloc+l];
136         }
137       }
138     }
139 
140     /* Map this to the application ordering (which for DMDAs is just the natural ordering
141        that would be used for 1 processor, numbering most rapidly by x, then y) */
142     ierr = AOPetscToApplication(ao,nloc,iglobal);CHKERRQ(ierr);
143 
144     /* Then map the application ordering back to the PETSc DMDA ordering */
145     ierr = AOApplicationToPetsc(ao,nloc,iglobal);CHKERRQ(ierr);
146 
147     /* Verify the mappings: the forward/inverse round-trip must reproduce ltog exactly */
148     kk=0;
149     for (j=Ys; j<Ys+Ym; j++) {
150       for (i=Xs; i<Xs+Xm; i++) {
151         iloc = w*((j-Ys)*Xm + i-Xs);
152         for (l=0; l<w; l++) {
153           if (iglobal[kk] != ltog[iloc+l]) {
154             ierr = PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,j,i,l,ltog[iloc+l],iglobal[kk]);CHKERRQ(ierr);
155           }
156           kk++;
157         }
158       }
159     }
160     ierr = PetscFree(iglobal);CHKERRQ(ierr);
161     ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
162   }
163 
164   /* Free memory */
165   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
166   ierr = VecDestroy(&local);CHKERRQ(ierr);
167   ierr = VecDestroy(&global);CHKERRQ(ierr);
168   ierr = DMDestroy(&da);CHKERRQ(ierr);
169 
170   ierr = PetscFinalize();
171   return ierr;
172 }
172 
173 
174 /*TEST
175 
176    test:
177       nsize: 4
178       args: -nox
179       filter: grep -v -i Object
180       requires: x
181 
182    test:
183       suffix: 2
184       args: -testorder -nox
185       requires: x
186 
187 TEST*/
188