xref: /petsc/src/dm/tests/ex6.c (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
/* Help text shown by -help; passed to PetscInitialize(). */
static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";
2 
3 #include <petscdm.h>
4 #include <petscdmda.h>
5 #include <petscao.h>
6 
7 int main(int argc, char **argv) {
8   PetscMPIInt     rank;
9   PetscInt        M = 3, N = 5, P = 3, s = 1, w = 2, nloc, l, i, j, k, kk, m = PETSC_DECIDE, n = PETSC_DECIDE, p = PETSC_DECIDE;
10   PetscInt        Xs, Xm, Ys, Ym, Zs, Zm, iloc, *iglobal;
11   const PetscInt *ltog;
12   PetscInt       *lx = NULL, *ly = NULL, *lz = NULL;
13   PetscBool       test_order = PETSC_FALSE;
14   DM              da;
15   PetscViewer     viewer;
16   Vec             local, global;
17   PetscScalar     value;
18   DMBoundaryType  bx = DM_BOUNDARY_NONE, by = DM_BOUNDARY_NONE, bz = DM_BOUNDARY_NONE;
19   DMDAStencilType stencil_type = DMDA_STENCIL_BOX;
20   AO              ao;
21   PetscBool       flg = PETSC_FALSE;
22 
23   PetscFunctionBeginUser;
24   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
25   PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 300, 0, 400, 300, &viewer));
26 
27   /* Read options */
28   PetscCall(PetscOptionsGetInt(NULL, NULL, "-NX", &M, NULL));
29   PetscCall(PetscOptionsGetInt(NULL, NULL, "-NY", &N, NULL));
30   PetscCall(PetscOptionsGetInt(NULL, NULL, "-NZ", &P, NULL));
31   PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
32   PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
33   PetscCall(PetscOptionsGetInt(NULL, NULL, "-p", &p, NULL));
34   PetscCall(PetscOptionsGetInt(NULL, NULL, "-s", &s, NULL));
35   PetscCall(PetscOptionsGetInt(NULL, NULL, "-w", &w, NULL));
36   flg = PETSC_FALSE;
37   PetscCall(PetscOptionsGetBool(NULL, NULL, "-star", &flg, NULL));
38   if (flg) stencil_type = DMDA_STENCIL_STAR;
39   flg = PETSC_FALSE;
40   PetscCall(PetscOptionsGetBool(NULL, NULL, "-box", &flg, NULL));
41   if (flg) stencil_type = DMDA_STENCIL_BOX;
42 
43   flg = PETSC_FALSE;
44   PetscCall(PetscOptionsGetBool(NULL, NULL, "-xperiodic", &flg, NULL));
45   if (flg) bx = DM_BOUNDARY_PERIODIC;
46   flg = PETSC_FALSE;
47   PetscCall(PetscOptionsGetBool(NULL, NULL, "-xghosted", &flg, NULL));
48   if (flg) bx = DM_BOUNDARY_GHOSTED;
49   flg = PETSC_FALSE;
50   PetscCall(PetscOptionsGetBool(NULL, NULL, "-xnonghosted", &flg, NULL));
51 
52   flg = PETSC_FALSE;
53   PetscCall(PetscOptionsGetBool(NULL, NULL, "-yperiodic", &flg, NULL));
54   if (flg) by = DM_BOUNDARY_PERIODIC;
55   flg = PETSC_FALSE;
56   PetscCall(PetscOptionsGetBool(NULL, NULL, "-yghosted", &flg, NULL));
57   if (flg) by = DM_BOUNDARY_GHOSTED;
58   flg = PETSC_FALSE;
59   PetscCall(PetscOptionsGetBool(NULL, NULL, "-ynonghosted", &flg, NULL));
60 
61   flg = PETSC_FALSE;
62   PetscCall(PetscOptionsGetBool(NULL, NULL, "-zperiodic", &flg, NULL));
63   if (flg) bz = DM_BOUNDARY_PERIODIC;
64   flg = PETSC_FALSE;
65   PetscCall(PetscOptionsGetBool(NULL, NULL, "-zghosted", &flg, NULL));
66   if (flg) bz = DM_BOUNDARY_GHOSTED;
67   flg = PETSC_FALSE;
68   PetscCall(PetscOptionsGetBool(NULL, NULL, "-znonghosted", &flg, NULL));
69 
70   PetscCall(PetscOptionsGetBool(NULL, NULL, "-testorder", &test_order, NULL));
71 
72   flg = PETSC_FALSE;
73   PetscCall(PetscOptionsGetBool(NULL, NULL, "-distribute", &flg, NULL));
74   if (flg) {
75     PetscCheck(m != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -m option with -distribute option");
76     PetscCall(PetscMalloc1(m, &lx));
77     for (i = 0; i < m - 1; i++) lx[i] = 4;
78     lx[m - 1] = M - 4 * (m - 1);
79     PetscCheck(n != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -n option with -distribute option");
80     PetscCall(PetscMalloc1(n, &ly));
81     for (i = 0; i < n - 1; i++) ly[i] = 2;
82     ly[n - 1] = N - 2 * (n - 1);
83     PetscCheck(p != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -p option with -distribute option");
84     PetscCall(PetscMalloc1(p, &lz));
85     for (i = 0; i < p - 1; i++) lz[i] = 2;
86     lz[p - 1] = P - 2 * (p - 1);
87   }
88 
89   /* Create distributed array and get vectors */
90   PetscCall(DMDACreate3d(PETSC_COMM_WORLD, bx, by, bz, stencil_type, M, N, P, m, n, p, w, s, lx, ly, lz, &da));
91   PetscCall(DMSetFromOptions(da));
92   PetscCall(DMSetUp(da));
93   PetscCall(PetscFree(lx));
94   PetscCall(PetscFree(ly));
95   PetscCall(PetscFree(lz));
96   PetscCall(DMView(da, viewer));
97   PetscCall(DMCreateGlobalVector(da, &global));
98   PetscCall(DMCreateLocalVector(da, &local));
99 
100   /* Set global vector; send ghost points to local vectors */
101   value = 1;
102   PetscCall(VecSet(global, value));
103   PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
104   PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));
105 
106   /* Scale local vectors according to processor rank; pass to global vector */
107   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
108   value = rank;
109   PetscCall(VecScale(local, value));
110   PetscCall(DMLocalToGlobalBegin(da, local, INSERT_VALUES, global));
111   PetscCall(DMLocalToGlobalEnd(da, local, INSERT_VALUES, global));
112 
113   if (!test_order) { /* turn off printing when testing ordering mappings */
114     if (M * N * P < 40) {
115       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nGlobal Vector:\n"));
116       PetscCall(VecView(global, PETSC_VIEWER_STDOUT_WORLD));
117       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n"));
118     }
119   }
120 
121   /* Send ghost points to local vectors */
122   PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
123   PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));
124 
125   flg = PETSC_FALSE;
126   PetscCall(PetscOptionsGetBool(NULL, NULL, "-local_print", &flg, NULL));
127   if (flg) {
128     PetscViewer sviewer;
129     PetscCall(PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD));
130     PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nLocal Vector: processor %d\n", rank));
131     PetscCall(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
132     PetscCall(VecView(local, sviewer));
133     PetscCall(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
134     PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
135     PetscCall(PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD));
136   }
137 
138   /* Tests mappings between application/PETSc orderings */
139   if (test_order) {
140     ISLocalToGlobalMapping ltogm;
141 
142     PetscCall(DMGetLocalToGlobalMapping(da, &ltogm));
143     PetscCall(ISLocalToGlobalMappingGetSize(ltogm, &nloc));
144     PetscCall(ISLocalToGlobalMappingGetIndices(ltogm, &ltog));
145 
146     PetscCall(DMDAGetGhostCorners(da, &Xs, &Ys, &Zs, &Xm, &Ym, &Zm));
147     PetscCall(DMDAGetAO(da, &ao));
148     /* PetscCall(AOView(ao,PETSC_VIEWER_STDOUT_WORLD)); */
149     PetscCall(PetscMalloc1(nloc, &iglobal));
150 
151     /* Set iglobal to be global indices for each processor's local and ghost nodes,
152        using the DMDA ordering of grid points */
153     kk = 0;
154     for (k = Zs; k < Zs + Zm; k++) {
155       for (j = Ys; j < Ys + Ym; j++) {
156         for (i = Xs; i < Xs + Xm; i++) {
157           iloc = w * ((k - Zs) * Xm * Ym + (j - Ys) * Xm + i - Xs);
158           for (l = 0; l < w; l++) iglobal[kk++] = ltog[iloc + l];
159         }
160       }
161     }
162 
163     /* Map this to the application ordering (which for DMDAs is just the natural ordering
164        that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
165     PetscCall(AOPetscToApplication(ao, nloc, iglobal));
166 
167     /* Then map the application ordering back to the PETSc DMDA ordering */
168     PetscCall(AOApplicationToPetsc(ao, nloc, iglobal));
169 
170     /* Verify the mappings */
171     kk = 0;
172     for (k = Zs; k < Zs + Zm; k++) {
173       for (j = Ys; j < Ys + Ym; j++) {
174         for (i = Xs; i < Xs + Xm; i++) {
175           iloc = w * ((k - Zs) * Xm * Ym + (j - Ys) * Xm + i - Xs);
176           for (l = 0; l < w; l++) {
177             if (iglobal[kk] != ltog[iloc + l]) {
178               PetscCall(PetscPrintf(MPI_COMM_WORLD, "[%d] Problem with mapping: z=%" PetscInt_FMT ", j=%" PetscInt_FMT ", i=%" PetscInt_FMT ", l=%" PetscInt_FMT ", petsc1=%" PetscInt_FMT ", petsc2=%" PetscInt_FMT "\n", rank, k, j, i, l, ltog[iloc + l], iglobal[kk]));
179             }
180             kk++;
181           }
182         }
183       }
184     }
185     PetscCall(PetscFree(iglobal));
186     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));
187   }
188 
189   /* Free memory */
190   PetscCall(PetscViewerDestroy(&viewer));
191   PetscCall(VecDestroy(&local));
192   PetscCall(VecDestroy(&global));
193   PetscCall(DMDestroy(&da));
194   PetscCall(PetscFinalize());
195   return 0;
196 }
197 
198 /*TEST
199 
200     test:
201       args:  -testorder -nox
202 
203  TEST*/
204