/* xref: /petsc/src/dm/impls/plex/tests/ex47.c (revision 7a46b59513fac54acb317385fa9864cc0988b1fb) */
/* Help text printed by the -help option: this example demonstrates recovering
   the original (natural) element numbering after DMPlexDistribute. */
static char help[] = "The main goal of this code is to retrieve the original element numbers as found in the "
                     "initial partitions (sInitialPartition)... but after the call to DMPlexDistribute";

#include <petsc.h>

6 PetscReal sCoords2x5Mesh[18][2] = {
7  {0.00000000000000000e+00, 0.00000000000000000e+00},
8  {2.00000000000000000e+00, 0.00000000000000000e+00},
9  {0.00000000000000000e+00, 1.00000000000000000e+00},
10  {2.00000000000000000e+00, 1.00000000000000000e+00},
11  {9.99999999997387978e-01, 0.00000000000000000e+00},
12  {9.99999999997387978e-01, 1.00000000000000000e+00},
13  {0.00000000000000000e+00, 2.00000000000000011e-01},
14  {0.00000000000000000e+00, 4.00000000000000022e-01},
15  {0.00000000000000000e+00, 5.99999999999999978e-01},
16  {0.00000000000000000e+00, 8.00000000000000044e-01},
17  {2.00000000000000000e+00, 2.00000000000000011e-01},
18  {2.00000000000000000e+00, 4.00000000000000022e-01},
19  {2.00000000000000000e+00, 5.99999999999999978e-01},
20  {2.00000000000000000e+00, 8.00000000000000044e-01},
21  {9.99999999997387756e-01, 2.00000000000000011e-01},
22  {9.99999999997387978e-01, 4.00000000000000022e-01},
23  {9.99999999997387978e-01, 6.00000000000000089e-01},
24  {9.99999999997388089e-01, 8.00000000000000044e-01}};
25 
26 //Connectivity of a 2x5 rectangular mesh of quads :
27 const PetscInt sConnectivity2x5Mesh[10][4] = {
28   {0,4,14,6},
29   {6,14,15,7},
30   {7,15,16,8},
31   {8,16,17,9},
32   {9,17,5,2},
33   {4,1,10,14},
34   {14,10,11,15},
35   {15,11,12,16},
36   {16,12,13,17},
37   {17,13,3,5}};
38 
39 const PetscInt sInitialPartition2x5Mesh[2][5] = {
40   {0,2,4,6,8},
41   {1,3,5,7,9}
42 };
43 
44 const PetscInt sNLoclCells2x5Mesh = 5;
45 const PetscInt sNGlobVerts2x5Mesh = 18;
46 
47 int main(int argc, char **argv)
48 {
49   const PetscInt   Nc                 = sNLoclCells2x5Mesh; //Same on each rank for this example...
50   const PetscInt   Nv                 = sNGlobVerts2x5Mesh;
51   const PetscInt*  InitPartForRank[2] = {&sInitialPartition2x5Mesh[0][0],
52                                          &sInitialPartition2x5Mesh[1][0]};
53   const PetscInt (*Conn)[4]           = sConnectivity2x5Mesh;
54 
55   const PetscInt   Ncor = 4;
56   const PetscInt   dim  = 2;
57   DM               dm, idm, ddm;
58   PetscSF          sfVert, sfMig, sfPart;
59   PetscPartitioner part;
60   PetscSection     s;
61   PetscInt        *cells, c;
62   PetscMPIInt      size, rank;
63   PetscBool        box = PETSC_FALSE, field = PETSC_FALSE;
64 
65   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
66   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
67   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
68   PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_SUP, "This is a 2 processors example only");
69   PetscCall(PetscOptionsGetBool(NULL, NULL, "-box", &box, NULL));
70   PetscCall(PetscOptionsGetBool(NULL, NULL, "-field", &field, NULL));
71 
72   PetscCall(DMPlexCreate(PETSC_COMM_WORLD, &dm));
73   if (box) {
74     PetscCall(DMSetType(dm, DMPLEX));
75     PetscCall(DMSetFromOptions(dm));
76   } else {
77     PetscCall(PetscMalloc1(Nc * Ncor, &cells));
78     for (c = 0; c < Nc; ++c) {
79       PetscInt cell = (InitPartForRank[rank])[c], cor;
80 
81       for (cor = 0; cor < Ncor; ++cor) {
82         cells[c*Ncor + cor] = Conn[cell][cor];
83       }
84     }
85     PetscCall(DMSetDimension(dm, dim));
86     PetscCall(DMPlexBuildFromCellListParallel(dm, Nc, PETSC_DECIDE, Nv, Ncor, cells, &sfVert, NULL));
87     PetscCall(PetscSFDestroy(&sfVert));
88     PetscCall(PetscFree(cells));
89     PetscCall(DMPlexInterpolate(dm, &idm));
90     PetscCall(DMDestroy(&dm));
91     dm = idm;
92   }
93   PetscCall(DMSetUseNatural(dm, PETSC_TRUE));
94   PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
95 
96   if (field) {
97    const PetscInt Nf         = 1;
98    const PetscInt numComp[1] = {1};
99    const PetscInt numDof[3]  = {0, 0, 1};
100    const PetscInt numBC      = 0;
101 
102    PetscCall(DMSetNumFields(dm, Nf));
103    PetscCall(DMPlexCreateSection(dm, NULL, numComp, numDof, numBC, NULL, NULL, NULL, NULL, &s));
104    PetscCall(DMSetLocalSection(dm, s));
105    PetscCall(PetscSectionView(s, PETSC_VIEWER_STDOUT_WORLD));
106    PetscCall(PetscSectionDestroy(&s));
107   }
108 
109   PetscCall(DMPlexGetPartitioner(dm, &part));
110   PetscCall(PetscPartitionerSetFromOptions(part));
111 
112   PetscCall(DMPlexDistribute(dm, 0, &sfMig, &ddm));
113   PetscCall(PetscSFView(sfMig, PETSC_VIEWER_STDOUT_WORLD));
114   PetscCall(PetscSFCreateInverseSF(sfMig, &sfPart));
115   PetscCall(PetscObjectSetName((PetscObject) sfPart, "Inverse Migration SF"));
116   PetscCall(PetscSFView(sfPart, PETSC_VIEWER_STDOUT_WORLD));
117 
118   Vec          lGlobalVec, lNatVec;
119   PetscScalar *lNatVecArray;
120 
121   {
122     PetscSection s;
123 
124     PetscCall(DMGetGlobalSection(dm, &s));
125     PetscCall(PetscSectionView(s, PETSC_VIEWER_STDOUT_WORLD));
126   }
127   PetscCall(DMGetGlobalVector(dm, &lNatVec));
128   PetscCall(PetscObjectSetName((PetscObject) lNatVec, "Natural Vector (initial partition)"));
129 
130   //Copying the initial partition into the "natural" vector:
131   PetscCall(VecGetArray(lNatVec, &lNatVecArray));
132   for (c = 0; c < Nc; ++c) lNatVecArray[c] = (InitPartForRank[rank])[c];
133   PetscCall(VecRestoreArray(lNatVec, &lNatVecArray));
134 
135   PetscCall(DMGetGlobalVector(ddm,&lGlobalVec));
136   PetscCall(PetscObjectSetName((PetscObject) lGlobalVec, "Global Vector (reordered element numbers in the petsc distributed order)"));
137   PetscCall(VecZeroEntries(lGlobalVec));
138 
139   // The call to DMPlexNaturalToGlobalBegin/End does not produce our expected result...
140   // In lGlobalVec, we expect to have:
141   /*
142    * Process [0]
143    * 2.
144    * 4.
145    * 8.
146    * 3.
147    * 9.
148    * Process [1]
149    * 1.
150    * 5.
151    * 7.
152    * 0.
153    * 6.
154    *
155    * but we obtained:
156    *
157    * Process [0]
158    * 2.
159    * 4.
160    * 8.
161    * 0.
162    * 0.
163    * Process [1]
164    * 0.
165    * 0.
166    * 0.
167    * 0.
168    * 0.
169    */
170 
171    {
172      PetscSF nsf;
173 
174      PetscCall(DMPlexGetGlobalToNaturalSF(ddm, &nsf));
175      PetscCall(PetscSFView(nsf, NULL));
176    }
177   PetscCall(DMPlexNaturalToGlobalBegin(ddm, lNatVec, lGlobalVec));
178   PetscCall(DMPlexNaturalToGlobalEnd  (ddm, lNatVec, lGlobalVec));
179 
180   PetscCall(VecView(lNatVec, PETSC_VIEWER_STDOUT_WORLD));
181   PetscCall(VecView(lGlobalVec, PETSC_VIEWER_STDOUT_WORLD));
182 
183   PetscCall(DMRestoreGlobalVector(dm,&lNatVec));
184   PetscCall(DMRestoreGlobalVector(ddm,&lGlobalVec));
185 
186   PetscCall(PetscSFDestroy(&sfMig));
187   PetscCall(PetscSFDestroy(&sfPart));
188   PetscCall(DMDestroy(&dm));
189   PetscCall(DMDestroy(&ddm));
190   PetscCall(PetscFinalize());
191   return 0;
192 }

/*TEST

  testset:
    args: -field -petscpartitioner_type simple
    nsize: 2

    test:
      suffix: 0
      args:

    test:
      suffix: 1
      args: -box -dm_plex_simplex 0 -dm_plex_box_faces 2,5 -dm_distribute

TEST*/
209