/* petsc/src/dm/tests/ex4.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b) */

2 static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";
3 
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 
7 int main(int argc,char **argv)
8 {
9   PetscMPIInt      rank;
10   PetscInt         M = 10,N = 8,m = PETSC_DECIDE;
11   PetscInt         s =2,w=2,n = PETSC_DECIDE,nloc,l,i,j,kk;
12   PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal;
13   const PetscInt   *ltog;
14   PetscInt         *lx       = NULL,*ly = NULL;
15   PetscBool        testorder = PETSC_FALSE,flg;
16   DMBoundaryType   bx        = DM_BOUNDARY_NONE,by= DM_BOUNDARY_NONE;
17   DM               da;
18   PetscViewer      viewer;
19   Vec              local,global;
20   PetscScalar      value;
21   DMDAStencilType  st = DMDA_STENCIL_BOX;
22   AO               ao;
23 
24   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
25   PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer));
26 
27   /* Readoptions */
28   PetscCall(PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL));
29   PetscCall(PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL));
30   PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
31   PetscCall(PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL));
32   PetscCall(PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL));
33   PetscCall(PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL));
34 
35   flg  = PETSC_FALSE;
36   PetscCall(PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL)); if (flg) bx = DM_BOUNDARY_PERIODIC;
37   flg  = PETSC_FALSE;
38   PetscCall(PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL)); if (flg) by = DM_BOUNDARY_PERIODIC;
39   flg  = PETSC_FALSE;
40   PetscCall(PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL)); if (flg) bx = DM_BOUNDARY_GHOSTED;
41   flg  = PETSC_FALSE;
42   PetscCall(PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL)); if (flg) by = DM_BOUNDARY_GHOSTED;
43   flg  = PETSC_FALSE;
44   PetscCall(PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL)); if (flg) st = DMDA_STENCIL_STAR;
45   flg  = PETSC_FALSE;
46   PetscCall(PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL)); if (flg) st = DMDA_STENCIL_BOX;
47   flg  = PETSC_FALSE;
48   PetscCall(PetscOptionsGetBool(NULL,NULL,"-testorder",&testorder,NULL));
49   /*
50       Test putting two nodes in x and y on each processor, exact last processor
51       in x and y gets the rest.
52   */
53   flg  = PETSC_FALSE;
54   PetscCall(PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL));
55   if (flg) {
56     PetscCheck(m != PETSC_DECIDE,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
57     PetscCall(PetscMalloc1(m,&lx));
58     for (i=0; i<m-1; i++) { lx[i] = 4;}
59     lx[m-1] = M - 4*(m-1);
60     PetscCheck(n != PETSC_DECIDE,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
61     PetscCall(PetscMalloc1(n,&ly));
62     for (i=0; i<n-1; i++) { ly[i] = 2;}
63     ly[n-1] = N - 2*(n-1);
64   }
65 
66   /* Create distributed array and get vectors */
67   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da));
68   PetscCall(DMSetFromOptions(da));
69   PetscCall(DMSetUp(da));
70   PetscCall(PetscFree(lx));
71   PetscCall(PetscFree(ly));
72 
73   PetscCall(DMView(da,viewer));
74   PetscCall(DMCreateGlobalVector(da,&global));
75   PetscCall(DMCreateLocalVector(da,&local));
76 
77   /* Set global vector; send ghost points to local vectors */
78   value = 1;
79   PetscCall(VecSet(global,value));
80   PetscCall(DMGlobalToLocalBegin(da,global,INSERT_VALUES,local));
81   PetscCall(DMGlobalToLocalEnd(da,global,INSERT_VALUES,local));
82 
83   /* Scale local vectors according to processor rank; pass to global vector */
84   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
85   value = rank;
86   PetscCall(VecScale(local,value));
87   PetscCall(DMLocalToGlobalBegin(da,local,INSERT_VALUES,global));
88   PetscCall(DMLocalToGlobalEnd(da,local,INSERT_VALUES,global));
89 
90   if (!testorder) { /* turn off printing when testing ordering mappings */
91     PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n"));
92     PetscCall(VecView(global,PETSC_VIEWER_STDOUT_WORLD));
93     PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\n\n"));
94   }
95 
96   /* Send ghost points to local vectors */
97   PetscCall(DMGlobalToLocalBegin(da,global,INSERT_VALUES,local));
98   PetscCall(DMGlobalToLocalEnd(da,global,INSERT_VALUES,local));
99 
100   flg  = PETSC_FALSE;
101   PetscCall(PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL));
102   if (flg) {
103     PetscViewer sviewer;
104 
105     PetscCall(PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD));
106     PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank));
107     PetscCall(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer));
108     PetscCall(VecView(local,sviewer));
109     PetscCall(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer));
110     PetscCall(PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD));
111     PetscCall(PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD));
112   }
113 
114   /* Tests mappings between application/PETSc orderings */
115   if (testorder) {
116     ISLocalToGlobalMapping ltogm;
117 
118     PetscCall(DMGetLocalToGlobalMapping(da,&ltogm));
119     PetscCall(ISLocalToGlobalMappingGetSize(ltogm,&nloc));
120     PetscCall(ISLocalToGlobalMappingGetIndices(ltogm,&ltog));
121     PetscCall(DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL));
122     PetscCall(DMDAGetAO(da,&ao));
123     PetscCall(PetscMalloc1(nloc,&iglobal));
124 
125     /* Set iglobal to be global indices for each processor's local and ghost nodes,
126        using the DMDA ordering of grid points */
127     kk = 0;
128     for (j=Ys; j<Ys+Ym; j++) {
129       for (i=Xs; i<Xs+Xm; i++) {
130         iloc = w*((j-Ys)*Xm + i-Xs);
131         for (l=0; l<w; l++) {
132           iglobal[kk++] = ltog[iloc+l];
133         }
134       }
135     }
136 
137     /* Map this to the application ordering (which for DMDAs is just the natural ordering
138        that would be used for 1 processor, numbering most rapidly by x, then y) */
139     PetscCall(AOPetscToApplication(ao,nloc,iglobal));
140 
141     /* Then map the application ordering back to the PETSc DMDA ordering */
142     PetscCall(AOApplicationToPetsc(ao,nloc,iglobal));
143 
144     /* Verify the mappings */
145     kk=0;
146     for (j=Ys; j<Ys+Ym; j++) {
147       for (i=Xs; i<Xs+Xm; i++) {
148         iloc = w*((j-Ys)*Xm + i-Xs);
149         for (l=0; l<w; l++) {
150           if (iglobal[kk] != ltog[iloc+l]) {
151             PetscCall(PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%" PetscInt_FMT ", i=%" PetscInt_FMT ", l=%" PetscInt_FMT ", petsc1=%" PetscInt_FMT ", petsc2=%" PetscInt_FMT "\n",rank,j,i,l,ltog[iloc+l],iglobal[kk]));
152           }
153           kk++;
154         }
155       }
156     }
157     PetscCall(PetscFree(iglobal));
158     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog));
159   }
160 
161   /* Free memory */
162   PetscCall(PetscViewerDestroy(&viewer));
163   PetscCall(VecDestroy(&local));
164   PetscCall(VecDestroy(&global));
165   PetscCall(DMDestroy(&da));
166 
167   PetscCall(PetscFinalize());
168   return 0;
169 }

/*TEST

   test:
      nsize: 4
      args: -nox
      filter: grep -v -i Object
      requires: x

   test:
      suffix: 2
      args: -testorder -nox
      requires: x

TEST*/