xref: /petsc/src/dm/impls/plex/plexdd.c (revision c28052fffaf1fc78ad5bd552ab6fd2c36087b321)
1 #include <petsc/private/dmpleximpl.h> /*I   "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>
3 #include <petsc/private/sectionimpl.h>
4 #include <petsc/private/sfimpl.h>
5 
6 static PetscErrorCode DMTransferMaterialParameters(DM dm, PetscSF sf, DM odm)
7 {
8   Vec A;
9 
10   PetscFunctionBegin;
11   /* TODO handle regions? */
12   PetscCall(DMGetAuxiliaryVec(dm, NULL, 0, 0, &A));
13   if (A) {
14     DM           dmAux, ocdm, odmAux;
15     Vec          oA, oAt;
16     PetscSection sec, osec;
17 
18     PetscCall(VecGetDM(A, &dmAux));
19     PetscCall(DMClone(odm, &odmAux));
20     PetscCall(DMGetCoordinateDM(odm, &ocdm));
21     PetscCall(DMSetCoordinateDM(odmAux, ocdm));
22     PetscCall(DMCopyDisc(dmAux, odmAux));
23 
24     PetscCall(DMGetLocalSection(dmAux, &sec));
25     PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)sec), &osec));
26     PetscCall(VecCreate(PetscObjectComm((PetscObject)A), &oAt));
27     PetscCall(DMPlexDistributeField(dmAux, sf, sec, A, osec, oAt));
28     PetscCall(DMSetLocalSection(odmAux, osec));
29     PetscCall(PetscSectionDestroy(&osec));
30     PetscCall(DMCreateLocalVector(odmAux, &oA));
31     PetscCall(DMDestroy(&odmAux));
32     PetscCall(VecCopy(oAt, oA));
33     PetscCall(VecDestroy(&oAt));
34     PetscCall(DMSetAuxiliaryVec(odm, NULL, 0, 0, oA));
35     PetscCall(VecDestroy(&oA));
36   }
37   PetscFunctionReturn(PETSC_SUCCESS);
38 }
39 
40 PetscErrorCode DMCreateDomainDecomposition_Plex(DM dm, PetscInt *nsub, char ***names, IS **innerises, IS **outerises, DM **dms)
41 {
42   DM           odm;
43   PetscSF      migrationSF = NULL, sectionSF;
44   PetscSection sec, tsec, ogsec, olsec;
45   PetscInt     n, mh, ddovl = 0, pStart, pEnd, ni, no, nl;
46   PetscDS      ds;
47   DMLabel      label;
48   const char  *oname = "__internal_plex_dd_ovl_";
49   IS           gi_is, li_is, go_is, gl_is, ll_is;
50   IS           gis, lis;
51   PetscInt     rst, ren, c, *gidxs, *lidxs, *tidxs;
52   Vec          gvec;
53 
54   PetscFunctionBegin;
55   n = 1;
56   if (nsub) *nsub = n;
57   if (names) PetscCall(PetscCalloc1(n, names));
58   if (innerises) PetscCall(PetscCalloc1(n, innerises));
59   if (outerises) PetscCall(PetscCalloc1(n, outerises));
60   if (dms) PetscCall(PetscCalloc1(n, dms));
61 
62   PetscObjectOptionsBegin((PetscObject)dm);
63   PetscCall(PetscOptionsBoundedInt("-dm_plex_dd_overlap", "The size of the overlap halo for the subdomains", "DMCreateDomainDecomposition", ddovl, &ddovl, NULL, 0));
64   PetscOptionsEnd();
65 
66   PetscCall(DMViewFromOptions(dm, NULL, "-dm_plex_dd_dm_view"));
67   PetscCall(DMPlexDistributeOverlap_Internal(dm, ddovl + 1, PETSC_COMM_SELF, oname, &migrationSF, &odm));
68   if (!odm) PetscCall(DMClone(dm, &odm));
69   if (migrationSF) PetscCall(PetscSFViewFromOptions(migrationSF, (PetscObject)dm, "-dm_plex_dd_sf_view"));
70 
71   PetscCall(DMPlexGetMaxProjectionHeight(dm, &mh));
72   PetscCall(DMPlexSetMaxProjectionHeight(odm, mh));
73 
74   /* share discretization */
75   /* TODO Labels for regions may need to updated,
76      now it uses the original ones, not the ones for the odm.
77      Not sure what to do here */
78   /* PetscCall(DMCopyDisc(dm, odm)); */
79   PetscCall(DMGetDS(odm, &ds));
80   if (!ds) {
81     PetscCall(DMGetLocalSection(dm, &sec));
82     PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)odm), &tsec));
83     if (migrationSF) {
84       PetscCall(PetscSFDistributeSection(migrationSF, sec, NULL, tsec));
85     } else {
86       PetscCall(PetscSectionCopy(sec, tsec));
87     }
88     PetscCall(DMSetLocalSection(dm, tsec));
89     PetscCall(PetscSectionDestroy(&tsec));
90   }
91   /* TODO: what if these values changes? add to some DM hook? */
92   PetscCall(DMTransferMaterialParameters(dm, migrationSF, odm));
93 
94   PetscCall(DMViewFromOptions(odm, (PetscObject)dm, "-dm_plex_dd_overlap_dm_view"));
95 #if 0
96   {
97     DM              seqdm;
98     Vec             val;
99     IS              is;
100     PetscInt        vStart, vEnd;
101     const PetscInt *vnum;
102     char            name[256];
103     PetscViewer     viewer;
104 
105     PetscCall(DMPlexDistributeOverlap_Internal(dm, 0, PETSC_COMM_SELF, "local_mesh", NULL, &seqdm));
106     PetscCall(PetscSNPrintf(name, sizeof(name), "local_mesh_%d.vtu", PetscGlobalRank));
107     PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)seqdm), name, FILE_MODE_WRITE, &viewer));
108     PetscCall(DMGetLabel(seqdm, "local_mesh", &label));
109     PetscCall(DMPlexLabelComplete(seqdm, label));
110     PetscCall(DMPlexCreateLabelField(seqdm, label, &val));
111     PetscCall(VecView(val, viewer));
112     PetscCall(VecDestroy(&val));
113     PetscCall(PetscViewerDestroy(&viewer));
114 
115     PetscCall(PetscSNPrintf(name, sizeof(name), "asm_vertices_%d.vtu", PetscGlobalRank));
116     PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)seqdm), name, FILE_MODE_WRITE, &viewer));
117     PetscCall(DMCreateLabel(seqdm, "asm_vertices"));
118     PetscCall(DMGetLabel(seqdm, "asm_vertices", &label));
119     PetscCall(DMPlexGetVertexNumbering(dm, &is));
120     PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
121     PetscCall(ISGetIndices(is, &vnum));
122     for (PetscInt v = 0; v < vEnd - vStart; v++) {
123       if (vnum[v] < 0) continue;
124       PetscCall(DMLabelSetValue(label, v + vStart, 1));
125     }
126     PetscCall(DMPlexCreateLabelField(seqdm, label, &val));
127     PetscCall(VecView(val, viewer));
128     PetscCall(VecDestroy(&val));
129     PetscCall(ISRestoreIndices(is, &vnum));
130     PetscCall(PetscViewerDestroy(&viewer));
131 
132     PetscCall(DMDestroy(&seqdm));
133     PetscCall(PetscSNPrintf(name, sizeof(name), "ovl_mesh_%d.vtu", PetscGlobalRank));
134     PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)odm), name, FILE_MODE_WRITE, &viewer));
135     PetscCall(DMView(odm, viewer));
136     PetscCall(PetscViewerDestroy(&viewer));
137   }
138 #endif
139 
140   /* propagate original global ordering to overlapping DM */
141   PetscCall(DMGetSectionSF(dm, &sectionSF));
142   PetscCall(DMGetLocalSection(dm, &sec));
143   PetscCall(PetscSectionGetStorageSize(sec, &nl));
144   PetscCall(DMGetGlobalVector(dm, &gvec));
145   PetscCall(VecGetOwnershipRange(gvec, &rst, &ren));
146   PetscCall(DMRestoreGlobalVector(dm, &gvec));
147   PetscCall(ISCreateStride(PETSC_COMM_SELF, ren - rst, rst, 1, &gi_is)); /* non-overlapping dofs */
148   PetscCall(PetscMalloc1(nl, &lidxs));
149   for (PetscInt i = 0; i < nl; i++) lidxs[i] = -1;
150   PetscCall(ISGetIndices(gi_is, (const PetscInt **)&gidxs));
151   PetscCall(PetscSFBcastBegin(sectionSF, MPIU_INT, gidxs, lidxs, MPI_REPLACE));
152   PetscCall(PetscSFBcastEnd(sectionSF, MPIU_INT, gidxs, lidxs, MPI_REPLACE));
153   PetscCall(ISRestoreIndices(gi_is, (const PetscInt **)&gidxs));
154   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), nl, lidxs, PETSC_OWN_POINTER, &lis));
155   PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)odm), &tsec));
156   PetscCall(DMPlexDistributeFieldIS(dm, migrationSF, sec, lis, tsec, &gis));
157   PetscCall(PetscSectionDestroy(&tsec));
158   PetscCall(ISDestroy(&lis));
159   PetscCall(PetscSFDestroy(&migrationSF));
160 
161   /* make dofs on the overlap boundary (not the global boundary) constrained */
162   PetscCall(DMGetLabel(odm, oname, &label));
163   PetscCall(DMPlexLabelComplete(odm, label));
164   PetscCall(DMGetLocalSection(odm, &tsec));
165   PetscCall(PetscSectionGetChart(tsec, &pStart, &pEnd));
166   PetscCall(DMLabelCreateIndex(label, pStart, pEnd));
167   PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)tsec), &sec));
168   PetscCall(PetscSectionCopy_Internal(tsec, sec, label->bt));
169   PetscCall(DMSetLocalSection(odm, sec));
170   PetscCall(PetscSectionDestroy(&sec));
171   PetscCall(DMRemoveLabel(odm, oname, NULL));
172 
173   /* Create index sets for dofs in the overlap dm */
174   PetscCall(DMGetSectionSF(odm, &sectionSF));
175   PetscCall(DMGetLocalSection(odm, &olsec));
176   PetscCall(DMGetGlobalSection(odm, &ogsec));
177   PetscCall(PetscSectionViewFromOptions(ogsec, (PetscObject)dm, "-dm_plex_dd_overlap_gsection_view"));
178   PetscCall(PetscSectionViewFromOptions(olsec, (PetscObject)dm, "-dm_plex_dd_overlap_lsection_view"));
179   ni = ren - rst;
180   PetscCall(PetscSectionGetConstrainedStorageSize(ogsec, &no)); /* dofs in the overlap */
181   PetscCall(PetscSectionGetStorageSize(olsec, &nl));            /* local dofs in the overlap */
182   PetscCall(PetscMalloc1(no, &gidxs));
183   PetscCall(ISGetIndices(gis, (const PetscInt **)&lidxs));
184   PetscCall(PetscSFReduceBegin(sectionSF, MPIU_INT, lidxs, gidxs, MPI_REPLACE));
185   PetscCall(PetscSFReduceEnd(sectionSF, MPIU_INT, lidxs, gidxs, MPI_REPLACE));
186   PetscCall(ISRestoreIndices(gis, (const PetscInt **)&lidxs));
187 
188   /* non-overlapping dofs */
189   PetscCall(PetscMalloc1(no, &lidxs));
190   c = 0;
191   for (PetscInt i = 0; i < no; i++) {
192     if (gidxs[i] >= rst && gidxs[i] < ren) lidxs[c++] = i;
193   }
194   PetscCheck(c == ni, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT " != %" PetscInt_FMT "\n", c, ni);
195   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), ni, lidxs, PETSC_OWN_POINTER, &li_is));
196 
197   /* global dofs in the overlap */
198   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), no, gidxs, PETSC_OWN_POINTER, &go_is));
199   PetscCall(ISViewFromOptions(go_is, (PetscObject)dm, "-dm_plex_dd_overlap_gois_view"));
200   /* PetscCall(ISCreateStride(PetscObjectComm((PetscObject)odm), no, 0, 1, &lo_is)); */
201 
202   /* local dofs of the overlapping subdomain (we actually need only dofs on the boundary of the subdomain) */
203   PetscCall(PetscMalloc1(nl, &lidxs));
204   PetscCall(PetscMalloc1(nl, &gidxs));
205   PetscCall(ISGetIndices(gis, (const PetscInt **)&tidxs));
206   c = 0;
207   for (PetscInt i = 0; i < nl; i++) {
208     if (tidxs[i] < 0) continue;
209     lidxs[c] = i;
210     gidxs[c] = tidxs[i];
211     c++;
212   }
213   PetscCall(ISRestoreIndices(gis, (const PetscInt **)&tidxs));
214   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), c, gidxs, PETSC_OWN_POINTER, &gl_is));
215   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), c, lidxs, PETSC_OWN_POINTER, &ll_is));
216   PetscCall(ISViewFromOptions(gl_is, (PetscObject)dm, "-dm_plex_dd_overlap_glis_view"));
217 
218   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_gi", (PetscObject)gi_is));
219   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_li", (PetscObject)li_is));
220   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_go", (PetscObject)go_is));
221   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_gl", (PetscObject)gl_is));
222   PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_ll", (PetscObject)ll_is));
223 
224   if (innerises) (*innerises)[0] = gi_is;
225   else PetscCall(ISDestroy(&gi_is));
226   if (outerises) (*outerises)[0] = go_is;
227   else PetscCall(ISDestroy(&go_is));
228   if (dms) (*dms)[0] = odm;
229   else PetscCall(DMDestroy(&odm));
230   PetscCall(ISDestroy(&li_is));
231   PetscCall(ISDestroy(&gl_is));
232   PetscCall(ISDestroy(&ll_is));
233   PetscCall(ISDestroy(&gis));
234 
235   PetscFunctionReturn(PETSC_SUCCESS);
236 }
237 
238 PetscErrorCode DMCreateDomainDecompositionScatters_Plex(DM dm, PetscInt n, DM *subdms, VecScatter **iscat, VecScatter **oscat, VecScatter **lscat)
239 {
240   Vec gvec, svec, lvec;
241   IS  gi_is, li_is, go_is, gl_is, ll_is;
242 
243   PetscFunctionBegin;
244   if (iscat) PetscCall(PetscMalloc1(n, iscat));
245   if (oscat) PetscCall(PetscMalloc1(n, oscat));
246   if (lscat) PetscCall(PetscMalloc1(n, lscat));
247 
248   PetscCall(DMGetGlobalVector(dm, &gvec));
249   for (PetscInt i = 0; i < n; i++) {
250     PetscCall(DMGetGlobalVector(subdms[i], &svec));
251     PetscCall(DMGetLocalVector(subdms[i], &lvec));
252     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_gi", (PetscObject *)&gi_is));
253     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_li", (PetscObject *)&li_is));
254     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_go", (PetscObject *)&go_is));
255     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_gl", (PetscObject *)&gl_is));
256     PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_ll", (PetscObject *)&ll_is));
257     PetscCheck(gi_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
258     PetscCheck(li_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
259     PetscCheck(go_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
260     PetscCheck(gl_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
261     PetscCheck(ll_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
262     if (iscat) PetscCall(VecScatterCreate(gvec, gi_is, svec, li_is, &(*iscat)[i]));
263     if (oscat) PetscCall(VecScatterCreate(gvec, go_is, svec, NULL, &(*oscat)[i]));
264     if (lscat) PetscCall(VecScatterCreate(gvec, gl_is, lvec, ll_is, &(*lscat)[i]));
265     PetscCall(DMRestoreGlobalVector(subdms[i], &svec));
266     PetscCall(DMRestoreLocalVector(subdms[i], &lvec));
267   }
268   PetscCall(DMRestoreGlobalVector(dm, &gvec));
269   PetscFunctionReturn(PETSC_SUCCESS);
270 }
271 
272 /*
273      DMCreateNeumannOverlap_Plex - Generates an IS, an unassembled (Neumann) Mat, a setup function, and the corresponding context to be used by PCHPDDM.
274 
275    Input Parameter:
.     dm - the DM from which the overlapping (Neumann) subproblem is built
277 
278    Output Parameters:
279 +     ovl - index set of overlapping subdomains
280 .     J - unassembled (Neumann) local matrix
281 .     setup - function for generating the matrix
282 -     setup_ctx - context for setup
283 
284    Options Database Keys:
285 +   -dm_plex_view_neumann_original - view the DM without overlap
286 -   -dm_plex_view_neumann_overlap - view the DM with overlap as needed by PCHPDDM
287 
288    Level: advanced
289 
290 .seealso: `DMCreate()`, `DM`, `MATIS`, `PCHPDDM`, `PCHPDDMSetAuxiliaryMat()`
291 */
292 PetscErrorCode DMCreateNeumannOverlap_Plex(DM dm, IS *ovl, Mat *J, PetscErrorCode (**setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void **setup_ctx)
293 {
294   DM                     odm;
295   Mat                    pJ;
296   PetscSF                sf = NULL;
297   PetscSection           sec, osec;
298   ISLocalToGlobalMapping l2g;
299   const PetscInt        *idxs;
300   PetscInt               n, mh;
301 
302   PetscFunctionBegin;
303   *setup     = NULL;
304   *setup_ctx = NULL;
305   *ovl       = NULL;
306   *J         = NULL;
307 
308   /* Overlapped mesh
309      overlap is a little more generous, since it is not computed starting from the owned (Dirichlet) points, but from the locally owned cells */
310   PetscCall(DMPlexDistributeOverlap(dm, 1, &sf, &odm));
311   if (!odm) {
312     PetscCall(PetscSFDestroy(&sf));
313     PetscFunctionReturn(PETSC_SUCCESS);
314   }
315 
316   /* share discretization */
317   PetscCall(DMGetLocalSection(dm, &sec));
318   PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)sec), &osec));
319   PetscCall(PetscSFDistributeSection(sf, sec, NULL, osec));
320   /* what to do here? using both is fine? */
321   PetscCall(DMSetLocalSection(odm, osec));
322   PetscCall(DMCopyDisc(dm, odm));
323   PetscCall(DMPlexGetMaxProjectionHeight(dm, &mh));
324   PetscCall(DMPlexSetMaxProjectionHeight(odm, mh));
325   PetscCall(PetscSectionDestroy(&osec));
326 
327   /* material parameters */
328   /* TODO: what if these values changes? add to some DM hook? */
329   PetscCall(DMTransferMaterialParameters(dm, sf, odm));
330   PetscCall(PetscSFDestroy(&sf));
331 
332   PetscCall(DMViewFromOptions(dm, NULL, "-dm_plex_view_neumann_original"));
333   PetscCall(PetscObjectSetName((PetscObject)odm, "OVL"));
334   PetscCall(DMViewFromOptions(odm, NULL, "-dm_plex_view_neumann_overlap"));
335 
336   /* MATIS for the overlap region
337      the HPDDM interface wants local matrices,
338      we attach the global MATIS to the overlap IS,
339      since we need it to do assembly */
340   PetscCall(DMSetMatType(odm, MATIS));
341   PetscCall(DMCreateMatrix(odm, &pJ));
342   PetscCall(MatISGetLocalMat(pJ, J));
343   PetscCall(PetscObjectReference((PetscObject)*J));
344 
345   /* overlap IS */
346   PetscCall(MatISGetLocalToGlobalMapping(pJ, &l2g, NULL));
347   PetscCall(ISLocalToGlobalMappingGetSize(l2g, &n));
348   PetscCall(ISLocalToGlobalMappingGetIndices(l2g, &idxs));
349   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), n, idxs, PETSC_COPY_VALUES, ovl));
350   PetscCall(ISLocalToGlobalMappingRestoreIndices(l2g, &idxs));
351   PetscCall(PetscObjectCompose((PetscObject)*ovl, "_DM_Overlap_HPDDM_MATIS", (PetscObject)pJ));
352   PetscCall(DMDestroy(&odm));
353   PetscCall(MatDestroy(&pJ));
354 
355   /* special purpose setup function (composed in DMPlexSetSNESLocalFEM) */
356   PetscCall(PetscObjectQueryFunction((PetscObject)dm, "MatComputeNeumannOverlap_C", setup));
357   if (*setup) PetscCall(PetscObjectCompose((PetscObject)*ovl, "_DM_Original_HPDDM", (PetscObject)dm));
358   PetscFunctionReturn(PETSC_SUCCESS);
359 }
360