xref: /petsc/src/dm/impls/plex/plexdd.c (revision 9140fee14acbea959c6ed74f4836a1a89f708038)
1 #include <petsc/private/dmpleximpl.h> /*I   "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>
3 #include <petsc/private/sectionimpl.h>
4 #include <petsc/private/sfimpl.h>
5 
6 PetscErrorCode DMCreateDomainDecomposition_Plex(DM dm, PetscInt *nsub, char ***names, IS **innerises, IS **outerises, DM **dms)
7 {
8   DM           odm;
9   PetscSF      migrationSF = NULL, sectionSF;
10   PetscSection sec, tsec, ogsec, olsec;
11   PetscInt     n, mh, ddovl = 0, pStart, pEnd, ni, no, nl;
12   PetscDS      ds;
13   DMLabel      label;
14   const char  *oname = "__internal_plex_dd_ovl_";
15   IS           gi_is, li_is, go_is, gl_is, ll_is;
16   IS           gis, lis;
17   PetscInt     rst, ren, c, *gidxs, *lidxs, *tidxs;
18   Vec          gvec;
19 
20   PetscFunctionBegin;
21   n = 1;
22   if (nsub) *nsub = n;
23   if (names) PetscCall(PetscCalloc1(n, names));
24   if (innerises) PetscCall(PetscCalloc1(n, innerises));
25   if (outerises) PetscCall(PetscCalloc1(n, outerises));
26   if (dms) PetscCall(PetscCalloc1(n, dms));
27 
28   PetscObjectOptionsBegin((PetscObject)dm);
29   PetscCall(PetscOptionsBoundedInt("-dm_plex_dd_overlap", "The size of the overlap halo for the subdomains", "DMCreateDomainDecomposition", ddovl, &ddovl, NULL, 0));
30   PetscOptionsEnd();
31 
32   PetscCall(DMViewFromOptions(dm, NULL, "-dm_plex_dd_dm_view"));
33   PetscCall(DMPlexDistributeOverlap_Internal(dm, ddovl + 1, PETSC_COMM_SELF, oname, &migrationSF, &odm));
34   if (!odm) PetscCall(DMClone(dm, &odm));
35   if (migrationSF) PetscCall(PetscSFViewFromOptions(migrationSF, (PetscObject)dm, "-dm_plex_dd_sf_view"));
36 
37   PetscCall(DMPlexGetMaxProjectionHeight(dm, &mh));
38   PetscCall(DMPlexSetMaxProjectionHeight(odm, mh));
39 
40   /* share discretization */
41   /* TODO material parameters */
42   /* TODO Labels for regions may need to updated,
43      now it uses the original ones, not the ones for the odm.
44      Not sure what to do here */
45   /* PetscCall(DMCopyDisc(dm, odm)); */
46   PetscCall(DMGetDS(odm, &ds));
47   if (!ds) {
48     PetscCall(DMGetLocalSection(dm, &sec));
49     PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)odm), &tsec));
50     if (migrationSF) {
51       PetscCall(PetscSFDistributeSection(migrationSF, sec, NULL, tsec));
52     } else {
53       PetscCall(PetscSectionCopy(sec, tsec));
54     }
55     PetscCall(DMSetLocalSection(dm, tsec));
56     PetscCall(PetscSectionDestroy(&tsec));
57   }
58 
59   /* TODO: it would be nice to automatically translate filenames with wildcards:
60            name%r.vtu -> name${rank}.vtu
61            name%R.vtu -> name${PetscGlobalRank}.vtu
62            So that -dm_view vtk:name%R.vtu will automatically translate to the commented code below
63   */
64 #if 0
65   {
66     char        name[256];
67     PetscViewer viewer;
68 
69     PetscCall(PetscSNPrintf(name, sizeof(name), "ovl_mesh_%d.vtu", PetscGlobalRank));
70     PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)odm), name, FILE_MODE_WRITE, &viewer));
71     PetscCall(DMView(odm, viewer));
72     PetscCall(PetscViewerDestroy(&viewer));
73   }
74 #endif
75   PetscCall(DMViewFromOptions(odm, (PetscObject)dm, "-dm_plex_dd_overlap_dm_view"));
76 
77   /* propagate original global ordering to overlapping DM */
78   PetscCall(DMGetSectionSF(dm, &sectionSF));
79   PetscCall(DMGetLocalSection(dm, &sec));
80   PetscCall(PetscSectionGetStorageSize(sec, &nl));
81   PetscCall(DMGetGlobalVector(dm, &gvec));
82   PetscCall(VecGetOwnershipRange(gvec, &rst, &ren));
83   PetscCall(DMRestoreGlobalVector(dm, &gvec));
84   PetscCall(ISCreateStride(PETSC_COMM_SELF, ren - rst, rst, 1, &gi_is)); /* non-overlapping dofs */
85   PetscCall(PetscMalloc1(nl, &lidxs));
86   for (PetscInt i = 0; i < nl; i++) lidxs[i] = -1;
87   PetscCall(ISGetIndices(gi_is, (const PetscInt **)&gidxs));
88   PetscCall(PetscSFBcastBegin(sectionSF, MPIU_INT, gidxs, lidxs, MPI_REPLACE));
89   PetscCall(PetscSFBcastEnd(sectionSF, MPIU_INT, gidxs, lidxs, MPI_REPLACE));
90   PetscCall(ISRestoreIndices(gi_is, (const PetscInt **)&gidxs));
91   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), nl, lidxs, PETSC_OWN_POINTER, &lis));
92   PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)odm), &tsec));
93   PetscCall(DMPlexDistributeFieldIS(dm, migrationSF, sec, lis, tsec, &gis));
94   PetscCall(PetscSectionDestroy(&tsec));
95   PetscCall(ISDestroy(&lis));
96   PetscCall(PetscSFDestroy(&migrationSF));
97 
98   /* make dofs on the overlap boundary (not the global boundary) constrained */
99   PetscCall(DMGetLabel(odm, oname, &label));
100   PetscCall(DMPlexLabelComplete(odm, label));
101   PetscCall(DMGetLocalSection(odm, &tsec));
102   PetscCall(PetscSectionGetChart(tsec, &pStart, &pEnd));
103   PetscCall(DMLabelCreateIndex(label, pStart, pEnd));
104   PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)tsec), &sec));
105   PetscCall(PetscSectionCopy_Internal(tsec, sec, label->bt));
106   PetscCall(DMSetLocalSection(odm, sec));
107   PetscCall(PetscSectionDestroy(&sec));
108   PetscCall(DMRemoveLabel(odm, oname, NULL));
109 
110   /* Create index sets for dofs in the overlap dm */
111   PetscCall(DMGetSectionSF(odm, &sectionSF));
112   PetscCall(DMGetLocalSection(odm, &olsec));
113   PetscCall(DMGetGlobalSection(odm, &ogsec));
114   PetscCall(PetscSectionViewFromOptions(ogsec, (PetscObject)dm, "-dm_plex_dd_overlap_gsection_view"));
115   PetscCall(PetscSectionViewFromOptions(olsec, (PetscObject)dm, "-dm_plex_dd_overlap_lsection_view"));
116   ni = ren - rst;
117   PetscCall(PetscSectionGetConstrainedStorageSize(ogsec, &no)); /* dofs in the overlap */
118   PetscCall(PetscSectionGetStorageSize(olsec, &nl));            /* local dofs in the overlap */
119   PetscCall(PetscMalloc1(no, &gidxs));
120   PetscCall(ISGetIndices(gis, (const PetscInt **)&lidxs));
121   PetscCall(PetscSFReduceBegin(sectionSF, MPIU_INT, lidxs, gidxs, MPI_REPLACE));
122   PetscCall(PetscSFReduceEnd(sectionSF, MPIU_INT, lidxs, gidxs, MPI_REPLACE));
123   PetscCall(ISRestoreIndices(gis, (const PetscInt **)&lidxs));
124 
125   /* non-overlapping dofs */
126   PetscCall(PetscMalloc1(no, &lidxs));
127   c = 0;
128   for (PetscInt i = 0; i < no; i++) {
129     if (gidxs[i] >= rst && gidxs[i] < ren) lidxs[c++] = i;
130   }
131   PetscCheck(c == ni, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT " != %" PetscInt_FMT "\n", c, ni);
132   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), ni, lidxs, PETSC_OWN_POINTER, &li_is));
133 
134   /* global dofs in the overlap */
135   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), no, gidxs, PETSC_OWN_POINTER, &go_is));
136   PetscCall(ISViewFromOptions(go_is, (PetscObject)dm, "-dm_plex_dd_overlap_gois_view"));
137   /* PetscCall(ISCreateStride(PetscObjectComm((PetscObject)odm), no, 0, 1, &lo_is)); */
138 
139   /* local dofs of the overlapping subdomain (we actually need only dofs on the boundary of the subdomain) */
140   PetscCall(PetscMalloc1(nl, &lidxs));
141   PetscCall(PetscMalloc1(nl, &gidxs));
142   PetscCall(ISGetIndices(gis, (const PetscInt **)&tidxs));
143   c = 0;
144   for (PetscInt i = 0; i < nl; i++) {
145     if (tidxs[i] < 0) continue;
146     lidxs[c] = i;
147     gidxs[c] = tidxs[i];
148     c++;
149   }
150   PetscCall(ISRestoreIndices(gis, (const PetscInt **)&tidxs));
151   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), c, gidxs, PETSC_OWN_POINTER, &gl_is));
152   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), c, lidxs, PETSC_OWN_POINTER, &ll_is));
153   PetscCall(ISViewFromOptions(gl_is, (PetscObject)dm, "-dm_plex_dd_overlap_glis_view"));
154 
155   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_gi", (PetscObject)gi_is));
156   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_li", (PetscObject)li_is));
157   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_go", (PetscObject)go_is));
158   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_lo", NULL));
159   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_gl", (PetscObject)gl_is));
160   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_ll", (PetscObject)ll_is));
161 
162   if (innerises) (*innerises)[0] = gi_is;
163   else PetscCall(ISDestroy(&gi_is));
164   if (outerises) (*outerises)[0] = go_is;
165   else PetscCall(ISDestroy(&go_is));
166   if (dms) (*dms)[0] = odm;
167   else PetscCall(DMDestroy(&odm));
168   PetscCall(ISDestroy(&li_is));
169   PetscCall(ISDestroy(&gl_is));
170   PetscCall(ISDestroy(&ll_is));
171   PetscCall(ISDestroy(&gis));
172 
173   PetscFunctionReturn(PETSC_SUCCESS);
174 }
175 
176 PetscErrorCode DMCreateDomainDecompositionScatters_Plex(DM dm, PetscInt n, DM *subdms, VecScatter **iscat, VecScatter **oscat, VecScatter **lscat)
177 {
178   Vec gvec, svec, lvec;
179   IS  gi_is, li_is, go_is, lo_is, gl_is, ll_is;
180 
181   PetscFunctionBegin;
182   if (iscat) PetscCall(PetscMalloc1(n, iscat));
183   if (oscat) PetscCall(PetscMalloc1(n, oscat));
184   if (lscat) PetscCall(PetscMalloc1(n, lscat));
185 
186   PetscCall(DMGetGlobalVector(dm, &gvec));
187   for (PetscInt i = 0; i < n; i++) {
188     PetscCall(DMGetGlobalVector(subdms[i], &svec));
189     PetscCall(DMGetLocalVector(subdms[i], &lvec));
190     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_gi", (PetscObject *)&gi_is));
191     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_li", (PetscObject *)&li_is));
192     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_go", (PetscObject *)&go_is));
193     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_lo", (PetscObject *)&lo_is));
194     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_gl", (PetscObject *)&gl_is));
195     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_ll", (PetscObject *)&ll_is));
196     if (iscat) PetscCall(VecScatterCreate(gvec, gi_is, svec, li_is, &(*iscat)[i]));
197     if (oscat) PetscCall(VecScatterCreate(gvec, go_is, svec, lo_is, &(*oscat)[i]));
198     if (lscat) PetscCall(VecScatterCreate(gvec, gl_is, lvec, ll_is, &(*lscat)[i]));
199     PetscCall(DMRestoreGlobalVector(subdms[i], &svec));
200     PetscCall(DMRestoreLocalVector(subdms[i], &lvec));
201   }
202   PetscCall(DMRestoreGlobalVector(dm, &gvec));
203   PetscFunctionReturn(PETSC_SUCCESS);
204 }
205 
206 /*
207      DMCreateNeumannOverlap_Plex - Generates an IS, an unassembled (Neumann) Mat, a setup function, and the corresponding context to be used by PCHPDDM.
208 
   Input Parameter:
.     dm - the underlying DM of the preconditioner context
211 
212    Output Parameters:
213 +     ovl - index set of overlapping subdomains
214 .     J - unassembled (Neumann) local matrix
215 .     setup - function for generating the matrix
216 -     setup_ctx - context for setup
217 
218    Options Database Keys:
219 +   -dm_plex_view_neumann_original - view the DM without overlap
220 -   -dm_plex_view_neumann_overlap - view the DM with overlap as needed by PCHPDDM
221 
222    Level: advanced
223 
224 .seealso: `DMCreate()`, `DM`, `MATIS`, `PCHPDDM`, `PCHPDDMSetAuxiliaryMat()`
225 */
PetscErrorCode DMCreateNeumannOverlap_Plex(DM dm, IS *ovl, Mat *J, PetscErrorCode (**setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void **setup_ctx)
{
  DM                     odm;         /* overlapping DM */
  Mat                    pJ;          /* global MATIS built on odm */
  PetscSF                sf = NULL;   /* migration SF from DMPlexDistributeOverlap */
  PetscSection           sec, osec;
  ISLocalToGlobalMapping l2g;
  const PetscInt        *idxs;
  PetscInt               n, mh;

  PetscFunctionBegin;
  /* default all outputs to NULL so the early return below is well defined */
  *setup     = NULL;
  *setup_ctx = NULL;
  *ovl       = NULL;
  *J         = NULL;

  /* Overlapped mesh
     overlap is a little more generous, since it is not computed starting from the owned (Dirichlet) points, but from the locally owned cells */
  PetscCall(DMPlexDistributeOverlap(dm, 1, &sf, &odm));
  if (!odm) {
    /* nothing was distributed (e.g. sequential run): leave all outputs NULL */
    PetscCall(PetscSFDestroy(&sf));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* share discretization: migrate dm's local section to the overlapping DM */
  PetscCall(DMGetLocalSection(dm, &sec));
  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)sec), &osec));
  PetscCall(PetscSFDistributeSection(sf, sec, NULL, osec));
  /* what to do here? using both is fine? */
  PetscCall(DMSetLocalSection(odm, osec));
  PetscCall(DMCopyDisc(dm, odm));
  PetscCall(DMPlexGetMaxProjectionHeight(dm, &mh));
  PetscCall(DMPlexSetMaxProjectionHeight(odm, mh));
  PetscCall(PetscSectionDestroy(&osec));

  /* material parameters: migrate the auxiliary vector (if any) to the overlap */
  {
    Vec A;

    PetscCall(DMGetAuxiliaryVec(dm, NULL, 0, 0, &A));
    if (A) {
      DM  dmAux, ocdm, odmAux;
      Vec oA;

      /* clone the overlap DM for the auxiliary field and share coordinates */
      PetscCall(VecGetDM(A, &dmAux));
      PetscCall(DMClone(odm, &odmAux));
      PetscCall(DMGetCoordinateDM(odm, &ocdm));
      PetscCall(DMSetCoordinateDM(odmAux, ocdm));
      PetscCall(DMCopyDisc(dmAux, odmAux));

      PetscCall(DMGetLocalSection(dmAux, &sec));
      PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)sec), &osec));
      PetscCall(VecCreate(PetscObjectComm((PetscObject)A), &oA));
      PetscCall(VecSetDM(oA, odmAux));
      /* TODO: what if these values change? */
      PetscCall(DMPlexDistributeField(dmAux, sf, sec, A, osec, oA));
      PetscCall(DMSetLocalSection(odmAux, osec));
      PetscCall(PetscSectionDestroy(&osec));
      PetscCall(DMSetAuxiliaryVec(odm, NULL, 0, 0, oA));
      PetscCall(VecDestroy(&oA));
      PetscCall(DMDestroy(&odmAux));
    }
  }
  PetscCall(PetscSFDestroy(&sf));

  PetscCall(DMViewFromOptions(dm, NULL, "-dm_plex_view_neumann_original"));
  PetscCall(PetscObjectSetName((PetscObject)odm, "OVL"));
  PetscCall(DMViewFromOptions(odm, NULL, "-dm_plex_view_neumann_overlap"));

  /* MATIS for the overlap region
     the HPDDM interface wants local matrices,
     we attach the global MATIS to the overlap IS,
     since we need it to do assembly */
  PetscCall(DMSetMatType(odm, MATIS));
  PetscCall(DMCreateMatrix(odm, &pJ));
  PetscCall(MatISGetLocalMat(pJ, J));
  /* keep the local matrix alive after pJ is destroyed below */
  PetscCall(PetscObjectReference((PetscObject)*J));

  /* overlap IS: the MATIS local-to-global map gives the global dofs of the overlap */
  PetscCall(MatISGetLocalToGlobalMapping(pJ, &l2g, NULL));
  PetscCall(ISLocalToGlobalMappingGetSize(l2g, &n));
  PetscCall(ISLocalToGlobalMappingGetIndices(l2g, &idxs));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), n, idxs, PETSC_COPY_VALUES, ovl));
  PetscCall(ISLocalToGlobalMappingRestoreIndices(l2g, &idxs));
  /* the compose keeps a reference to pJ, so destroying pJ here does not free it */
  PetscCall(PetscObjectCompose((PetscObject)*ovl, "_DM_Overlap_HPDDM_MATIS", (PetscObject)pJ));
  PetscCall(DMDestroy(&odm));
  PetscCall(MatDestroy(&pJ));

  /* special purpose setup function (composed in DMPlexSetSNESLocalFEM) */
  PetscCall(PetscObjectQueryFunction((PetscObject)dm, "MatComputeNeumannOverlap_C", setup));
  if (*setup) PetscCall(PetscObjectCompose((PetscObject)*ovl, "_DM_Original_HPDDM", (PetscObject)dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
319