xref: /honee/src/dm-utils.c (revision 337840fc9f8b699b98947f09e30443f8c0c7f962)
1 // SPDX-FileCopyrightText: Copyright (c) 2017-2024, HONEE contributors.
2 // SPDX-License-Identifier: Apache-2.0 OR BSD-2-Clause
3 
4 /// @file
5 /// Utilities for setting up DM and PetscFE
6 
7 #include <ceed.h>
8 #include <dm-utils.h>
9 #include <petsc-ceed-utils.h>
10 #include <petscdmplex.h>
11 #include <petscds.h>
12 
13 /**
14   @brief Convert `DM` field to `DS` field.
15 
16   @param[in]   dm            `DM` holding mesh
17   @param[in]   domain_label  Label for `DM` domain
18   @param[in]   dm_field      Index of `DM` field
19   @param[out]  ds_field      Index of `DS` field
20 
21   @return An error code: 0 - success, otherwise - failure
22 **/
DMFieldToDSField(DM dm,DMLabel domain_label,PetscInt dm_field,PetscInt * ds_field)23 PetscErrorCode DMFieldToDSField(DM dm, DMLabel domain_label, PetscInt dm_field, PetscInt *ds_field) {
24   PetscDS         ds;
25   IS              field_is;
26   const PetscInt *fields;
27   PetscInt        num_fields;
28 
29   PetscFunctionBeginUser;
30   PetscCall(DMGetRegionDS(dm, domain_label, &field_is, &ds, NULL));
31   PetscCall(ISGetIndices(field_is, &fields));
32   PetscCall(ISGetSize(field_is, &num_fields));
33   for (PetscInt i = 0; i < num_fields; i++) {
34     if (dm_field == fields[i]) {
35       *ds_field = i;
36       break;
37     }
38   }
39   PetscCall(ISRestoreIndices(field_is, &fields));
40 
41   PetscCheck(*ds_field != -1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Could not find dm_field %" PetscInt_FMT " in DS", dm_field);
42   PetscFunctionReturn(PETSC_SUCCESS);
43 }
44 
45 /**
46   @brief Destroy `ElemRestriction` in a `PetscContainer`.
47 
48   Not collective across MPI processes.
49 
50   @param[in,out]  ctx  `CeedElemRestriction`
51 
52   @return An error code: 0 - success, otherwise - failure
53 **/
static PetscErrorCode DMPlexCeedElemRestrictionDestroy(void **ctx) {
  // ctx holds the CeedElemRestriction stashed in the PetscContainer by DMPlexCeedElemRestrictionCreate()
  CeedElemRestriction rstr = *(CeedElemRestriction *)ctx;

  PetscFunctionBegin;
  // Use the restriction's own Ceed for error reporting while releasing the container's reference
  PetscCallCeed(CeedElemRestrictionReturnCeed(rstr), CeedElemRestrictionDestroy(&rstr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
61 
62 /**
63   @brief Create `CeedElemRestriction` from `DMPlex`.
64 
65  The `CeedElemRestriction` is stored in the `DMPlex` object for reuse;
66  if the routine is called again with the same arguments, the same `CeedElemRestriction` is returned.
67 
68   Not collective across MPI processes.
69 
70   @param[in]   ceed          `Ceed` context
71   @param[in]   dm            `DMPlex` holding mesh
72   @param[in]   domain_label  `DMLabel` for `DMPlex` domain
73   @param[in]   label_value   Stratum value
74   @param[in]   height        Height of `DMPlex` topology
75   @param[in]   dm_field      Index of `DMPlex` field
76   @param[out]  restriction   `CeedElemRestriction` for `DMPlex`
77 
78   @return An error code: 0 - success, otherwise - failure
79 **/
DMPlexCeedElemRestrictionCreate(Ceed ceed,DM dm,DMLabel domain_label,PetscInt label_value,PetscInt height,PetscInt dm_field,CeedElemRestriction * restriction)80 PetscErrorCode DMPlexCeedElemRestrictionCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, PetscInt dm_field,
81                                                CeedElemRestriction *restriction) {
82   char                container_name[1024];
83   CeedElemRestriction container_restriction;
84 
85   PetscFunctionBeginUser;
86   {
87     const char *label_name = NULL;
88 
89     if (domain_label) PetscCall(PetscObjectGetName((PetscObject)domain_label, &label_name));
90     PetscCall(PetscSNPrintf(container_name, sizeof(container_name),
91                             "DM CeedElemRestriction - label: %s label value: %" PetscInt_FMT " height: %" PetscInt_FMT " DM field: %" PetscInt_FMT,
92                             label_name ? label_name : "NONE", label_value, height, dm_field));
93   }
94   PetscCall(PetscObjectContainerQuery((PetscObject)dm, container_name, (void **)&container_restriction));
95 
96   if (container_restriction) {
97     // Copy existing restriction
98     *restriction = NULL;
99     PetscCallCeed(ceed, CeedElemRestrictionReferenceCopy(container_restriction, restriction));
100   } else {
101     PetscInt num_elem, elem_size, num_dof, num_comp, *restriction_offsets_petsc;
102     CeedInt *restriction_offsets_ceed = NULL;
103 
104     // Build restriction
105     PetscCall(DMPlexGetLocalOffsets(dm, domain_label, label_value, height, dm_field, &num_elem, &elem_size, &num_comp, &num_dof,
106                                     &restriction_offsets_petsc));
107     PetscCall(IntArrayPetscToCeed(num_elem * elem_size, &restriction_offsets_petsc, &restriction_offsets_ceed));
108     PetscCallCeed(ceed, CeedElemRestrictionCreate(ceed, num_elem, elem_size, num_comp, 1, num_dof, CEED_MEM_HOST, CEED_COPY_VALUES,
109                                                   restriction_offsets_ceed, restriction));
110     PetscCall(PetscFree(restriction_offsets_ceed));
111 
112     // Set in container
113     PetscCallCeed(ceed, CeedElemRestrictionReferenceCopy(*restriction, &container_restriction));
114     PetscCall(PetscObjectContainerCompose((PetscObject)dm, container_name, container_restriction,
115                                           (PetscCtxDestroyFn *)DMPlexCeedElemRestrictionDestroy));
116   }
117   PetscFunctionReturn(PETSC_SUCCESS);
118 }
119 
120 /**
121   @brief Create `CeedElemRestriction` from `DMPlex` domain for mesh coordinates.
122 
123  The `CeedElemRestriction` is stored in the coordinate `DMPlex` object for reuse;
124  if the routine is called again with the same arguments, the same `CeedElemRestriction` is returned.
125 
126   Not collective across MPI processes.
127 
128   @param[in]   ceed          `Ceed` context
129   @param[in]   dm            `DMPlex` holding mesh
130   @param[in]   domain_label  Label for `DMPlex` domain
131   @param[in]   label_value   Stratum value
132   @param[in]   height        Height of `DMPlex` topology
133   @param[out]  restriction   `CeedElemRestriction` for mesh
134 
135   @return An error code: 0 - success, otherwise - failure
136 **/
DMPlexCeedElemRestrictionCoordinateCreate(Ceed ceed,DM dm,DMLabel domain_label,PetscInt label_value,PetscInt height,CeedElemRestriction * restriction)137 PetscErrorCode DMPlexCeedElemRestrictionCoordinateCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height,
138                                                          CeedElemRestriction *restriction) {
139   DM dm_coord;
140 
141   PetscFunctionBeginUser;
142   PetscCall(DMGetCellCoordinateDM(dm, &dm_coord));
143   if (!dm_coord) {
144     PetscCall(DMGetCoordinateDM(dm, &dm_coord));
145   }
146   PetscCall(DMPlexCeedElemRestrictionCreate(ceed, dm_coord, domain_label, label_value, height, 0, restriction));
147   PetscFunctionReturn(PETSC_SUCCESS);
148 }
149 
150 /**
  @brief Create `CeedElemRestriction` from `DMPlex` domain for auxiliary `QFunction` data.
152 
153   Not collective across MPI processes.
154 
155   @param[in]   ceed           `Ceed` context
156   @param[in]   dm             `DMPlex` holding mesh
157   @param[in]   domain_label   Label for `DMPlex` domain
158   @param[in]   label_value    Stratum value
159   @param[in]   height         Height of `DMPlex` topology
160   @param[in]   q_data_size    Number of components for `QFunction` data
161   @param[in]   is_collocated  Boolean flag indicating if the data is collocated on the nodes (`PETSC_TRUE`) or on quadrature points (`PETSC_FALSE`)
162   @param[out]  restriction    Strided `CeedElemRestriction` for `QFunction` data
163 
164   @return An error code: 0 - success, otherwise - failure
165 **/
DMPlexCeedElemRestrictionStridedCreate(Ceed ceed,DM dm,DMLabel domain_label,PetscInt label_value,PetscInt height,PetscInt q_data_size,PetscBool is_collocated,CeedElemRestriction * restriction)166 static PetscErrorCode DMPlexCeedElemRestrictionStridedCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height,
167                                                              PetscInt q_data_size, PetscBool is_collocated, CeedElemRestriction *restriction) {
168   PetscInt num_elem, num_qpts, dm_field = 0;
169 
170   PetscFunctionBeginUser;
171   {  // Get number of elements
172     PetscInt depth;
173     DMLabel  depth_label;
174     IS       point_is, depth_is;
175 
176     PetscCall(DMPlexGetDepth(dm, &depth));
177     PetscCall(DMPlexGetDepthLabel(dm, &depth_label));
178     PetscCall(DMLabelGetStratumIS(depth_label, depth - height, &depth_is));
179     if (domain_label) {
180       IS domain_is;
181 
182       PetscCall(DMLabelGetStratumIS(domain_label, label_value, &domain_is));
183       if (domain_is) {
184         PetscCall(ISIntersect(depth_is, domain_is, &point_is));
185         PetscCall(ISDestroy(&domain_is));
186       } else {
187         point_is = NULL;
188       }
189       PetscCall(ISDestroy(&depth_is));
190     } else {
191       point_is = depth_is;
192     }
193     if (point_is) {
194       PetscCall(ISGetLocalSize(point_is, &num_elem));
195     } else {
196       num_elem = 0;
197     }
198     PetscCall(ISDestroy(&point_is));
199   }
200 
201   {  // Get number of quadrature points
202     PetscDS  ds;
203     PetscFE  fe;
204     PetscInt ds_field = -1;
205 
206     PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL));
207     PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field));
208     PetscCall(PetscDSGetDiscretization(ds, ds_field, (PetscObject *)&fe));
209     PetscCall(PetscFEGetHeightSubspace(fe, height, &fe));
210     if (is_collocated) {
211       PetscDualSpace dual_space;
212       PetscInt       num_dual_basis_vectors, dim, num_comp;
213 
214       PetscCall(PetscFEGetSpatialDimension(fe, &dim));
215       PetscCall(PetscFEGetNumComponents(fe, &num_comp));
216       PetscCall(PetscFEGetDualSpace(fe, &dual_space));
217       PetscCall(PetscDualSpaceGetDimension(dual_space, &num_dual_basis_vectors));
218       num_qpts = num_dual_basis_vectors / num_comp;
219     } else {
220       PetscQuadrature quadrature;
221 
222       PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL));
223       PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field));
224       PetscCall(PetscDSGetDiscretization(ds, ds_field, (PetscObject *)&fe));
225       PetscCall(PetscFEGetHeightSubspace(fe, height, &fe));
226       PetscCall(PetscFEGetQuadrature(fe, &quadrature));
227       PetscCall(PetscQuadratureGetData(quadrature, NULL, NULL, &num_qpts, NULL, NULL));
228     }
229   }
230 
231   // Create the restriction
232   PetscCallCeed(ceed, CeedElemRestrictionCreateStrided(ceed, num_elem, num_qpts, q_data_size, num_elem * num_qpts * q_data_size, CEED_STRIDES_BACKEND,
233                                                        restriction));
234   PetscFunctionReturn(PETSC_SUCCESS);
235 }
236 
237 /**
  @brief Create `CeedElemRestriction` from `DMPlex` domain for auxiliary `QFunction` data.
239 
240   Not collective across MPI processes.
241 
242   @param[in]   ceed           `Ceed` context
243   @param[in]   dm             `DMPlex` holding mesh
244   @param[in]   domain_label   Label for `DMPlex` domain
245   @param[in]   label_value    Stratum value
246   @param[in]   height         Height of `DMPlex` topology
247   @param[in]   q_data_size    Number of components for `QFunction` data
248   @param[out]  restriction    Strided `CeedElemRestriction` for `QFunction` data
249 
250   @return An error code: 0 - success, otherwise - failure
251 **/
PetscErrorCode DMPlexCeedElemRestrictionQDataCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height,
                                                    PetscInt q_data_size, CeedElemRestriction *restriction) {
  PetscFunctionBeginUser;
  // Thin wrapper: strided restriction sized for data stored at quadrature points (is_collocated = PETSC_FALSE)
  PetscCall(DMPlexCeedElemRestrictionStridedCreate(ceed, dm, domain_label, label_value, height, q_data_size, PETSC_FALSE, restriction));
  PetscFunctionReturn(PETSC_SUCCESS);
}
258 
259 /**
  @brief Create `CeedElemRestriction` from `DMPlex` domain for nodally collocated auxiliary `QFunction` data.
261 
262   Not collective across MPI processes.
263 
264   @param[in]   ceed           `Ceed` context
265   @param[in]   dm             `DMPlex` holding mesh
266   @param[in]   domain_label   Label for `DMPlex` domain
267   @param[in]   label_value    Stratum value
268   @param[in]   height         Height of `DMPlex` topology
269   @param[in]   q_data_size    Number of components for `QFunction` data
270   @param[out]  restriction    Strided `CeedElemRestriction` for `QFunction` data
271 
272   @return An error code: 0 - success, otherwise - failure
273 **/
PetscErrorCode DMPlexCeedElemRestrictionCollocatedCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height,
                                                         PetscInt q_data_size, CeedElemRestriction *restriction) {
  PetscFunctionBeginUser;
  // Thin wrapper: strided restriction sized for data collocated at the FE nodes (is_collocated = PETSC_TRUE)
  PetscCall(DMPlexCeedElemRestrictionStridedCreate(ceed, dm, domain_label, label_value, height, q_data_size, PETSC_TRUE, restriction));
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
281 /**
282   @brief Creates a tensor-product uniform quadrature.
283          This is only for comparison studies and generally should not be used.
284 
285   @param[in]   dim         Spatial dimension
286   @param[in]   num_comp    Number of components
287   @param[in]   num_points  Number of points in one dimension
288   @param[in]   a           Left end of interval (often -1)
289   @param[in]   b           Right end of interval (often +1)
290   @param[out]  q           `PetscQuadrature` object
291 
292   @return An error code: 0 - success, otherwise - failure
293 **/
static PetscErrorCode PetscDTUniformTensorQuadrature(PetscInt dim, PetscInt num_comp, PetscInt num_points, PetscReal a, PetscReal b,
                                                     PetscQuadrature *q) {
  DMPolytopeType ct;
  // Tensor-product point count: num_points^dim for dim in {1,2,3}; dim == 0 is re-sized in the switch below
  PetscInt       num_total_points = dim > 1 ? (dim > 2 ? (num_points * num_points * num_points) : (num_points * num_points)) : num_points;
  PetscReal     *coords, *weights, *coords_1d;

  PetscFunctionBeginUser;
  // Ownership of coords and weights transfers to the quadrature via PetscQuadratureSetData below
  PetscCall(PetscMalloc1(num_total_points * dim, &coords));
  PetscCall(PetscMalloc1(num_total_points * num_comp, &weights));
  // Compute weights, points
  switch (dim) {
    case 0:
      // Single reference point; re-allocate since the sizes above were computed with dim = 0
      ct = DM_POLYTOPE_POINT;
      PetscCall(PetscFree(coords));
      PetscCall(PetscFree(weights));
      PetscCall(PetscMalloc1(1, &coords));
      PetscCall(PetscMalloc1(num_comp, &weights));
      num_total_points = 1;
      coords[0]        = 0.0;
      for (PetscInt c = 0; c < num_comp; c++) weights[c] = 1.0;
      break;
    case 1: {
      // Midpoints of num_points equal subintervals of [a, b]; all weights 1.0 (uniform rule)
      ct             = DM_POLYTOPE_SEGMENT;
      PetscReal step = (b - a) / num_points;

      for (PetscInt i = 0; i < num_points; i++) {
        coords[i] = step * (i + 0.5) + a;
        for (PetscInt c = 0; c < num_comp; c++) weights[i * num_comp + c] = 1.0;
      }
    } break;
    case 2: {
      // Tensor product of the 1D midpoint coordinates over the quadrilateral
      ct = DM_POLYTOPE_QUADRILATERAL;
      PetscCall(PetscMalloc1(num_points, &coords_1d));
      PetscReal step = (b - a) / num_points;

      for (PetscInt i = 0; i < num_points; i++) coords_1d[i] = step * (i + 0.5) + a;
      for (PetscInt i = 0; i < num_points; i++) {
        for (PetscInt j = 0; j < num_points; j++) {
          coords[(i * num_points + j) * dim + 0] = coords_1d[i];
          coords[(i * num_points + j) * dim + 1] = coords_1d[j];
          for (PetscInt c = 0; c < num_comp; c++) weights[(i * num_points + j) * num_comp + c] = 1.0;
        }
      }
      PetscCall(PetscFree(coords_1d));
    } break;
    case 3: {
      // Tensor product of the 1D midpoint coordinates over the hexahedron
      ct = DM_POLYTOPE_HEXAHEDRON;
      PetscCall(PetscMalloc1(num_points, &coords_1d));
      PetscReal step = (b - a) / num_points;

      for (PetscInt i = 0; i < num_points; i++) coords_1d[i] = step * (i + 0.5) + a;
      for (PetscInt i = 0; i < num_points; i++) {
        for (PetscInt j = 0; j < num_points; j++) {
          for (PetscInt k = 0; k < num_points; k++) {
            coords[((i * num_points + j) * num_points + k) * dim + 0] = coords_1d[i];
            coords[((i * num_points + j) * num_points + k) * dim + 1] = coords_1d[j];
            coords[((i * num_points + j) * num_points + k) * dim + 2] = coords_1d[k];
            for (PetscInt c = 0; c < num_comp; c++) weights[((i * num_points + j) * num_points + k) * num_comp + c] = 1.0;
          }
        }
      }
      PetscCall(PetscFree(coords_1d));
    } break;
    // LCOV_EXCL_START
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cannot construct quadrature rule for dimension %" PetscInt_FMT, dim);
      // LCOV_EXCL_STOP
  }
  PetscCall(PetscQuadratureCreate(PETSC_COMM_SELF, q));
  PetscCall(PetscQuadratureSetCellType(*q, ct));
  PetscCall(PetscQuadratureSetOrder(*q, num_points));
  PetscCall(PetscQuadratureSetData(*q, dim, num_comp, num_total_points, coords, weights));
  PetscCall(PetscObjectChangeTypeName((PetscObject)*q, "UniformTensor"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
369 
370 /**
371   @brief Setup `DM` with FE space of appropriate degree
372 
373   @param[in]   comm        MPI communicator
374   @param[in]   dim         Spatial dimension
375   @param[in]   num_comp    Number of components
376   @param[in]   is_simplex  Flag for simplex or tensor product element
377   @param[in]   order       Polynomial order of space
378   @param[in]   q_order     Quadrature order
379   @param[in]   prefix      The options prefix, or `NULL`
380   @param[out]  fem         `PetscFE` object
381 
382   @return An error code: 0 - success, otherwise - failure
383 **/
PetscErrorCode PetscFECreateLagrangeFromOptions(MPI_Comm comm, PetscInt dim, PetscInt num_comp, PetscBool is_simplex, PetscInt order,
                                                PetscInt q_order, const char prefix[], PetscFE *fem) {
  PetscBool       is_tensor     = !is_simplex;
  DMPolytopeType  polytope_type = DMPolytopeTypeSimpleShape(dim, is_simplex);
  PetscSpace      fe_space;
  PetscDualSpace  fe_dual_space;
  PetscQuadrature quadrature, face_quadrature;

  PetscFunctionBeginUser;
  // Create space
  PetscCall(PetscSpaceCreate(comm, &fe_space));
  PetscCall(PetscSpaceSetType(fe_space, PETSCSPACEPOLYNOMIAL));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fe_space, prefix));
  PetscCall(PetscSpacePolynomialSetTensor(fe_space, is_tensor));
  PetscCall(PetscSpaceSetNumComponents(fe_space, num_comp));
  PetscCall(PetscSpaceSetNumVariables(fe_space, dim));
  if (order >= 0) {
    PetscCall(PetscSpaceSetDegree(fe_space, order, PETSC_DETERMINE));
    if (polytope_type == DM_POLYTOPE_TRI_PRISM || polytope_type == DM_POLYTOPE_TRI_PRISM_TENSOR) {
      // Triangular prisms need an explicit tensor space: (dim-1 simplex "end" space) x (1D "side" space)
      PetscSpace fe_space_end, fe_space_side;

      // The tensor space is built scalar-valued; vector components are handled below via a sum space
      PetscCall(PetscSpaceSetNumComponents(fe_space, 1));
      PetscCall(PetscSpaceCreate(comm, &fe_space_end));
      PetscCall(PetscSpaceSetType(fe_space_end, PETSCSPACEPOLYNOMIAL));
      PetscCall(PetscSpacePolynomialSetTensor(fe_space_end, PETSC_FALSE));
      PetscCall(PetscSpaceSetNumComponents(fe_space_end, 1));
      PetscCall(PetscSpaceSetNumVariables(fe_space_end, dim - 1));
      PetscCall(PetscSpaceSetDegree(fe_space_end, order, PETSC_DETERMINE));
      PetscCall(PetscSpaceCreate(comm, &fe_space_side));
      PetscCall(PetscSpaceSetType(fe_space_side, PETSCSPACEPOLYNOMIAL));
      PetscCall(PetscSpacePolynomialSetTensor(fe_space_side, PETSC_FALSE));
      PetscCall(PetscSpaceSetNumComponents(fe_space_side, 1));
      PetscCall(PetscSpaceSetNumVariables(fe_space_side, 1));
      PetscCall(PetscSpaceSetDegree(fe_space_side, order, PETSC_DETERMINE));
      PetscCall(PetscSpaceSetType(fe_space, PETSCSPACETENSOR));
      PetscCall(PetscSpaceTensorSetNumSubspaces(fe_space, 2));
      PetscCall(PetscSpaceTensorSetSubspace(fe_space, 0, fe_space_end));
      PetscCall(PetscSpaceTensorSetSubspace(fe_space, 1, fe_space_side));
      PetscCall(PetscSpaceDestroy(&fe_space_end));
      PetscCall(PetscSpaceDestroy(&fe_space_side));

      if (num_comp > 1) {
        // Vector-valued prism space: interleaved (concatenated) sum of num_comp copies of the scalar tensor space
        PetscSpace scalar_fe_space = fe_space;

        PetscCall(PetscSpaceCreate(comm, &fe_space));
        PetscCall(PetscSpaceSetNumVariables(fe_space, dim));
        PetscCall(PetscSpaceSetNumComponents(fe_space, num_comp));
        PetscCall(PetscSpaceSetType(fe_space, PETSCSPACESUM));
        PetscCall(PetscSpaceSumSetNumSubspaces(fe_space, num_comp));
        PetscCall(PetscSpaceSumSetConcatenate(fe_space, PETSC_TRUE));
        PetscCall(PetscSpaceSumSetInterleave(fe_space, PETSC_TRUE, PETSC_FALSE));
        for (PetscInt i = 0; i < num_comp; i++) PetscCall(PetscSpaceSumSetSubspace(fe_space, i, scalar_fe_space));
        PetscCall(PetscSpaceDestroy(&scalar_fe_space));
      }
    }
  }
  PetscCall(PetscSpaceSetFromOptions(fe_space));
  PetscCall(PetscSpaceSetUp(fe_space));
  // Re-read properties that command-line options may have changed
  PetscCall(PetscSpaceGetDegree(fe_space, &order, NULL));
  PetscCall(PetscSpacePolynomialGetTensor(fe_space, &is_tensor));
  PetscCall(PetscSpaceGetNumComponents(fe_space, &num_comp));

  // Create dual space
  PetscCall(PetscDualSpaceCreate(comm, &fe_dual_space));
  PetscCall(PetscDualSpaceSetType(fe_dual_space, PETSCDUALSPACELAGRANGE));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fe_dual_space, prefix));
  {
    DM dual_space_dm;

    // The dual space needs a reference cell DM matching the polytope type
    PetscCall(DMPlexCreateReferenceCell(PETSC_COMM_SELF, polytope_type, &dual_space_dm));
    PetscCall(PetscDualSpaceSetDM(fe_dual_space, dual_space_dm));
    PetscCall(DMDestroy(&dual_space_dm));
  }
  PetscCall(PetscDualSpaceSetNumComponents(fe_dual_space, num_comp));
  PetscCall(PetscDualSpaceSetOrder(fe_dual_space, order));
  PetscCall(PetscDualSpaceLagrangeSetTensor(fe_dual_space, (is_tensor || (polytope_type == DM_POLYTOPE_TRI_PRISM)) ? PETSC_TRUE : PETSC_FALSE));
  PetscCall(PetscDualSpaceSetFromOptions(fe_dual_space));
  PetscCall(PetscDualSpaceSetUp(fe_dual_space));

  // Create quadrature
  // Default quadrature order to the (possibly option-updated) polynomial order
  q_order               = q_order >= 0 ? q_order : order;
  PetscBool use_uniform = PETSC_FALSE;

  PetscOptionsBegin(comm, NULL, "Uniform quadrature check", NULL);
  PetscCall(PetscOptionsBool("-petscdt_uniform_tensor_quadrature", "", NULL, use_uniform, &use_uniform, NULL));
  PetscOptionsEnd();
  PetscCheck(!use_uniform || (is_tensor || dim == 1), comm, PETSC_ERR_SUP, "Can only use uniform quadrature on tensor product elements");
  if (use_uniform) {
    PetscCall(PetscDTUniformTensorQuadrature(dim, 1, q_order + 1, -1.0, 1.0, &quadrature));
    PetscCall(PetscDTUniformTensorQuadrature(dim - 1, 1, q_order + 1, -1.0, 1.0, &face_quadrature));
  } else {
    PetscCall(PetscDTCreateDefaultQuadrature(polytope_type, q_order, &quadrature, &face_quadrature));
  }

  // Create finite element
  PetscCall(PetscFECreateFromSpaces(fe_space, fe_dual_space, quadrature, face_quadrature, fem));
  PetscCall(PetscFESetFromOptions(*fem));
  PetscFunctionReturn(PETSC_SUCCESS);
}
483 
484 /**
485   @brief Get global `DMPlex` topology type.
486 
487   Collective across MPI processes.
488 
489   @param[in]   dm            `DMPlex` holding mesh
490   @param[in]   domain_label  `DMLabel` for `DMPlex` domain
491   @param[in]   label_value   Stratum value
492   @param[in]   height        Height of `DMPlex` topology
493   @param[out]  cell_type     `DMPlex` topology type
494 **/
GetGlobalDMPlexPolytopeType(DM dm,DMLabel domain_label,PetscInt label_value,PetscInt height,DMPolytopeType * cell_type)495 static inline PetscErrorCode GetGlobalDMPlexPolytopeType(DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height,
496                                                          DMPolytopeType *cell_type) {
497   PetscInt first_point;
498   PetscInt ids[1] = {label_value};
499   DMLabel  depth_label;
500 
501   PetscFunctionBeginUser;
502   // Use depth label if no domain label present
503   if (!domain_label) {
504     PetscInt depth;
505 
506     PetscCall(DMPlexGetDepth(dm, &depth));
507     PetscCall(DMPlexGetDepthLabel(dm, &depth_label));
508     ids[0] = depth - height;
509   }
510 
511   // Get cell interp, grad, and quadrature data
512   PetscCall(DMGetFirstLabeledPoint(dm, dm, domain_label ? domain_label : depth_label, 1, ids, height, &first_point, NULL));
513   if (first_point != -1) PetscCall(DMPlexGetCellType(dm, first_point, cell_type));
514 
515   {  // Form agreement about CellType
516     PetscInt cell_type_local = -1, cell_type_global = -1;
517 
518     if (first_point != -1) cell_type_local = (PetscInt)*cell_type;
519     PetscCallMPI(MPIU_Allreduce(&cell_type_local, &cell_type_global, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)dm)));
520     *cell_type = (DMPolytopeType)cell_type_global;
521   }
522   PetscFunctionReturn(PETSC_SUCCESS);
523 }
524 
525 /**
526   @brief Get the permutation and field offset for a given depth.
527 
528   Not collective across MPI processes.
529 
530   @param[in]   dm            `DMPlex` holding mesh
531   @param[in]   depth         Depth of `DMPlex` topology
532   @param[in]   field         Index of `DMPlex` field
533   @param[out]  permutation   Permutation for `DMPlex` field
534   @param[out]  field_offset  Offset to `DMPlex` field
535 **/
static inline PetscErrorCode GetClosurePermutationAndFieldOffsetAtDepth(DM dm, PetscInt depth, PetscInt field, IS *permutation,
                                                                        PetscInt *field_offset) {
  PetscBool    is_field_continuous = PETSC_TRUE;
  PetscInt     dim, num_fields, offset = 0, size = 0;
  PetscSection section;

  PetscFunctionBeginUser;
  *field_offset = 0;

  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetLocalSection(dm, &section));
  PetscCall(PetscSectionGetNumFields(section, &num_fields));
  // ---- Permutation size and offset to current field
  // Sum sizes of all continuous fields; accumulate the offset of fields that precede `field`
  for (PetscInt f = 0; f < num_fields; f++) {
    PetscBool      is_continuous;
    PetscInt       num_components, num_dof_1d, dual_space_size, field_size;
    PetscObject    obj;
    PetscFE        fe = NULL;
    PetscDualSpace dual_space;

    PetscCall(PetscSectionGetFieldComponents(section, f, &num_components));
    PetscCall(DMGetField(dm, f, NULL, &obj));
    fe = (PetscFE)obj;
    PetscCall(PetscFEGetDualSpace(fe, &dual_space));
    PetscCall(PetscDualSpaceLagrangeGetContinuity(dual_space, &is_continuous));
    if (f == field) is_field_continuous = is_continuous;
    // Discontinuous fields do not participate in the closure permutation
    if (!is_continuous) continue;
    PetscCall(PetscDualSpaceGetDimension(dual_space, &dual_space_size));
    // Infer dofs per dimension as the dim-th root of dofs-per-component
    // NOTE(review): assumes a tensor-product dof layout -- confirm for non-tensor elements
    num_dof_1d = (PetscInt)PetscCeilReal(PetscPowReal(dual_space_size / num_components, 1.0 / dim));
    // Field size at the requested depth: num_dof_1d^depth dofs per component
    field_size = PetscPowInt(num_dof_1d, depth) * num_components;
    if (f < field) offset += field_size;
    size += field_size;
  }

  // Only continuous fields have a closure permutation; otherwise leave *permutation untouched and offset 0
  if (is_field_continuous) {
    *field_offset = offset;
    PetscCall(PetscSectionGetClosurePermutation(section, (PetscObject)dm, depth, size, permutation));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
576 
577 /**
578   @brief Compute field tabulation from `PetscTabulation`.
579 
580   Not collective across MPI processes.
581 
582   @param[in]   dm          `DMPlex` holding mesh
583   @param[in]   field       Index of `DMPlex` field
584   @param[in]   face        Index of basis face, or 0
585   @param[in]   tabulation  PETSc basis tabulation
586   @param[out]  interp      Interpolation matrix in libCEED orientation
587   @param[out]  grad        Gradient matrix in libCEED orientation
588 
589   @note `interp` and `grad` are allocated using `PetscCalloc1` and must be freed by the user.
590 **/
static inline PetscErrorCode ComputeFieldTabulationP2C(DM dm, PetscInt field, PetscInt face, PetscTabulation tabulation, CeedScalar **interp,
                                                       CeedScalar **grad) {
  PetscBool       is_simplex, has_permutation = PETSC_FALSE;
  PetscInt        field_offset = 0, num_comp, P, Q, dim;
  const PetscInt *permutation_indices;
  IS              permutation = NULL;

  PetscFunctionBeginUser;
  // P = scalar basis size, Q = number of quadrature points, dim = reference-cell dimension
  num_comp = tabulation->Nc;
  P        = tabulation->Nb / num_comp;
  Q        = tabulation->Np;
  dim      = tabulation->cdim;

  // Allocated here, freed by the caller
  PetscCall(PetscCalloc1(P * Q, interp));
  PetscCall(PetscCalloc1(P * Q * dim, grad));

  // Tensor-product cells need the closure permutation to reorder PETSc dofs into libCEED ordering
  PetscCall(DMPlexIsSimplex(dm, &is_simplex));
  if (!is_simplex) {
    PetscCall(GetClosurePermutationAndFieldOffsetAtDepth(dm, dim, field, &permutation, &field_offset));
    has_permutation = !!permutation;
    if (has_permutation) PetscCall(ISGetIndices(permutation, &permutation_indices));
  }

  // Only component 0 is read -- NOTE(review): assumes all components share the same scalar basis; confirm
  const CeedInt c = 0;
  for (CeedInt q = 0; q < Q; q++) {
    for (CeedInt p_ceed = 0; p_ceed < P; p_ceed++) {
      // Map libCEED basis index to PETSc basis index via the (field-offset-shifted) closure permutation
      CeedInt p_petsc           = has_permutation ? (permutation_indices[field_offset + p_ceed * num_comp] - field_offset) : (p_ceed * num_comp);
      (*interp)[q * P + p_ceed] = tabulation->T[0][((face * Q + q) * P * num_comp + p_petsc) * num_comp + c];
      for (CeedInt d = 0; d < dim; d++) {
        // libCEED grad layout: [dim][Q][P]; PETSc layout: [face][Q][Nb][Nc][dim]
        (*grad)[(d * Q + q) * P + p_ceed] = tabulation->T[1][(((face * Q + q) * P * num_comp + p_petsc) * num_comp + c) * dim + d];
      }
    }
  }

  if (has_permutation) PetscCall(ISRestoreIndices(permutation, &permutation_indices));
  PetscCall(ISDestroy(&permutation));
  PetscFunctionReturn(PETSC_SUCCESS);
}
629 
630 /**
631   @brief Get quadrature data from `PetscQuadrature` for use with libCEED.
632 
633   Not collective across MPI processes.
634 
635   @param[in]   fe          `PetscFE` object
636   @param[in]   quadrature  PETSc basis quadrature
637   @param[out]  q_dim       Quadrature dimension, or NULL if not needed
638   @param[out]  num_comp    Number of components, or NULL if not needed
639   @param[out]  Q           Number of quadrature points, or NULL if not needed
640   @param[out]  q_points    Quadrature points in libCEED orientation, or NULL if not needed
641   @param[out]  q_weights   Quadrature weights in libCEED orientation, or NULL if not needed
642 
643   @note `q_weights` and `q_points` are allocated using `PetscCalloc1` and must be freed by the user.
644 **/
GetQuadratureDataP2C(PetscFE fe,PetscQuadrature quadrature,PetscInt * q_dim,PetscInt * num_comp,PetscInt * Q,CeedScalar ** q_points,CeedScalar ** q_weights)645 static inline PetscErrorCode GetQuadratureDataP2C(PetscFE fe, PetscQuadrature quadrature, PetscInt *q_dim, PetscInt *num_comp, PetscInt *Q,
646                                                   CeedScalar **q_points, CeedScalar **q_weights) {
647   PetscInt         spatial_dim, dim, num_comp_quadrature, num_q_points;
648   const PetscReal *q_points_petsc, *q_weights_petsc;
649 
650   PetscFunctionBeginUser;
651   PetscCall(PetscFEGetSpatialDimension(fe, &spatial_dim));
652   PetscCall(PetscQuadratureGetData(quadrature, &dim, &num_comp_quadrature, &num_q_points, &q_points_petsc, &q_weights_petsc));
653   if (q_dim) *q_dim = dim;
654   if (num_comp) *num_comp = num_comp_quadrature;
655   if (Q) *Q = num_q_points;
656   if (q_weights) {
657     PetscSizeT q_weights_size = num_q_points;
658 
659     PetscCall(PetscCalloc1(q_weights_size, q_weights));
660     for (CeedInt i = 0; i < num_q_points; i++) (*q_weights)[i] = q_weights_petsc[i];
661   }
662   if (q_points) {
663     PetscSizeT q_points_size = num_q_points * spatial_dim;
664 
665     PetscCall(PetscCalloc1(q_points_size, q_points));
666     for (CeedInt i = 0; i < num_q_points; i++) {
667       for (CeedInt d = 0; d < dim; d++) (*q_points)[i * spatial_dim + d] = q_points_petsc[i * dim + d];
668     }
669   }
670   PetscFunctionReturn(PETSC_SUCCESS);
671 }
672 
673 /**
674   @brief Create 1D tabulation from `PetscFE`.
675 
676   Collective across MPI processes.
677 
678   @note `q_weights` and `q_points` are allocated using `PetscCalloc1` and must be freed by the user.
679 
680   @param[in]   comm          `MPI_Comm` for creating `FE` object from options
681   @param[in]   fe            PETSc basis `FE` object
682   @param[out]  tabulation    PETSc basis tabulation
683   @param[out]  q_points_1d   Quadrature points in libCEED orientation
684   @param[out]  q_weights_1d  Quadrature weights in libCEED orientation
685 
686   @return An error code: 0 - success, otherwise - failure
687 **/
Create1DTabulation_Tensor(MPI_Comm comm,PetscFE fe,PetscTabulation * tabulation,PetscReal ** q_points_1d,CeedScalar ** q_weights_1d)688 static inline PetscErrorCode Create1DTabulation_Tensor(MPI_Comm comm, PetscFE fe, PetscTabulation *tabulation, PetscReal **q_points_1d,
689                                                        CeedScalar **q_weights_1d) {
690   PetscFE         fe_1d;
691   PetscInt        dim, num_comp, Q = -1, q_dim = -1, num_derivatives = 1;
692   PetscQuadrature quadrature;
693 
694   PetscFunctionBeginUser;
695   // Create 1D FE
696   PetscCall(PetscFEGetNumComponents(fe, &num_comp));
697   PetscCall(PetscFEGetSpatialDimension(fe, &dim));
698   {
699     const char     *prefix;
700     PetscBool       is_tensor;
701     PetscInt        num_comp, order, q_order;
702     PetscDualSpace  dual_space;
703     PetscQuadrature quadrature;
704 
705     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)fe, &prefix));
706     PetscCall(PetscFEGetDualSpace(fe, &dual_space));
707     PetscCall(PetscFEGetNumComponents(fe, &num_comp));
708     PetscCall(PetscDualSpaceLagrangeGetTensor(dual_space, &is_tensor));
709     PetscCall(PetscDualSpaceGetOrder(dual_space, &order));
710     PetscCall(PetscFEGetQuadrature(fe, &quadrature));
711     PetscCall(PetscQuadratureGetData(quadrature, NULL, NULL, &q_order, NULL, NULL));
712     PetscCall(PetscFECreateLagrangeFromOptions(comm, 1, num_comp, !is_tensor, order,
713                                                (PetscInt)PetscCeilReal(PetscPowReal(1.0 * q_order, 1.0 / dim)) - 1, prefix, &fe_1d));
714   }
715 
716   // Get quadrature data
717   PetscCall(PetscFEGetQuadrature(fe_1d, &quadrature));
718   PetscCall(GetQuadratureDataP2C(fe_1d, quadrature, &q_dim, NULL, &Q, q_points_1d, q_weights_1d));
719 
720   // Compute 1D tabulation
721   PetscCall(PetscFECreateTabulation(fe_1d, 1, Q, *q_points_1d, num_derivatives, tabulation));
722 
723   // Cleanup
724   PetscCall(PetscFEDestroy(&fe_1d));
725 
726   PetscFunctionReturn(PETSC_SUCCESS);
727 }
728 
729 /**
730   @brief Destroy `CeedBasis` in a `PetscContainer`.
731 
732   Not collective across MPI processes.
733 
734   @param[in,out]  ctx  `CeedBasis`
735 
736   @return An error code: 0 - success, otherwise - failure
737 **/
DMPlexCeedBasisDestroy(void ** ctx)738 static PetscErrorCode DMPlexCeedBasisDestroy(void **ctx) {
739   CeedBasis basis = *(CeedBasis *)ctx;
740 
741   PetscFunctionBegin;
742   PetscCallCeed(CeedBasisReturnCeed(basis), CeedBasisDestroy(&basis));
743   PetscFunctionReturn(PETSC_SUCCESS);
744 }
745 
746 /**
747   @brief Create `CeedBasis` from `DMPlex`.
748 
749  The `CeedBasis` is stored in the `DMPlex` object for reuse;
750  if the routine is called again with the same arguments, the same `CeedBasis` is returned.
751 
752   Collective across MPI processes.
753 
754   @param[in]   ceed          `Ceed` context
755   @param[in]   dm            `DMPlex` holding mesh
756   @param[in]   domain_label  `DMLabel` for `DMPlex` domain
757   @param[in]   label_value   Stratum value
758   @param[in]   height        Height of `DMPlex` topology
759   @param[in]   dm_field      Index of `DMPlex` field
760   @param[out]  basis         `CeedBasis` for `DMPlex`
761 
762   @return An error code: 0 - success, otherwise - failure
763   **/
PetscErrorCode DMPlexCeedBasisCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, PetscInt dm_field,
                                     CeedBasis *basis) {
  MPI_Comm  comm = PetscObjectComm((PetscObject)dm);
  char      container_name[1024];
  CeedBasis container_basis;

  PetscFunctionBeginUser;
  {  // Build a cache key from the label name, label value, height, and DM field
    const char *label_name = NULL;

    if (domain_label) PetscCall(PetscObjectGetName((PetscObject)domain_label, &label_name));
    PetscCall(PetscSNPrintf(container_name, sizeof(container_name),
                            "DM CeedBasis - label: %s label value: %" PetscInt_FMT " height: %" PetscInt_FMT " DM field: %" PetscInt_FMT,
                            label_name ? label_name : "NONE", label_value, height, dm_field));
  }
  // Look for a basis previously cached on the DM under this key
  PetscCall(PetscObjectContainerQuery((PetscObject)dm, container_name, (void **)&container_basis));

  if (container_basis) {
    // Cache hit: hand back a new reference to the cached basis
    *basis = NULL;
    PetscCallCeed(ceed, CeedBasisReferenceCopy(container_basis, basis));
  } else {
    PetscDS         ds;
    PetscFE         fe;
    PetscDualSpace  dual_space;
    PetscQuadrature quadrature;
    PetscBool       is_tensor = PETSC_TRUE;
    PetscInt        ds_field  = -1;

    // Get element information
    PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL));
    PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field));
    PetscCall(PetscDSGetDiscretization(ds, ds_field, (PetscObject *)&fe));
    PetscCall(PetscFEGetHeightSubspace(fe, height, &fe));
    PetscCall(PetscFEGetQuadrature(fe, &quadrature));
    PetscCall(PetscFEGetDualSpace(fe, &dual_space));
    PetscCall(PetscDualSpaceLagrangeGetTensor(dual_space, &is_tensor));

    // Build libCEED basis
    PetscBool use_nontensor = PETSC_FALSE;

    // Option to force the non-tensor (H1) construction even for tensor-product elements
    PetscOptionsBegin(comm, NULL, "Tensor Basis as Nontensor Check", NULL);
    PetscCall(PetscOptionsBool("-ceed_basis_as_nontensor", "", NULL, use_nontensor, &use_nontensor, NULL));
    PetscOptionsEnd();

    if (!is_tensor || use_nontensor) {
      // Non-tensor path: tabulate the full element and create an H1 basis
      PetscTabulation  basis_tabulation;
      PetscInt         num_derivatives = 1, face = 0;
      CeedScalar      *q_points, *q_weights, *interp, *grad;
      CeedElemTopology elem_topo;

      {  // Verify the element topology is supported by libCEED
        DMPolytopeType cell_type;

        PetscCall(GetGlobalDMPlexPolytopeType(dm, domain_label, label_value, height, &cell_type));
        elem_topo = PolytopeTypePetscToCeed(cell_type);
        PetscCheck(elem_topo, comm, PETSC_ERR_SUP, "DMPlex topology not supported: %s", DMPolytopeTypes[cell_type]);
      }

      // Compute basis tabulation
      PetscCall(GetQuadratureDataP2C(fe, quadrature, NULL, NULL, NULL, &q_points, &q_weights));
      // Cell tabulation is borrowed from the PetscFE, so it is not destroyed here
      PetscCall(PetscFEGetCellTabulation(fe, num_derivatives, &basis_tabulation));
      PetscCall(ComputeFieldTabulationP2C(dm, dm_field, face, basis_tabulation, &interp, &grad));
      {
        // Nb is total basis size, Nc components, Np quadrature points
        PetscInt num_comp = basis_tabulation->Nc, P = basis_tabulation->Nb / num_comp, Q = basis_tabulation->Np;

        PetscCallCeed(ceed, CeedBasisCreateH1(ceed, elem_topo, num_comp, P, Q, interp, grad, q_points, q_weights, basis));
      }

      // Free buffers copied out by the helpers above
      PetscCall(PetscFree(q_points));
      PetscCall(PetscFree(q_weights));
      PetscCall(PetscFree(interp));
      PetscCall(PetscFree(grad));
    } else {
      // Tensor path: build a 1D tabulation and create a tensor-product H1 basis
      PetscInt        P_1d, Q_1d, num_comp, dim;
      PetscTabulation basis_tabulation;
      CeedScalar     *q_points_1d, *q_weights_1d, *interp_1d, *grad_1d;

      PetscCall(PetscFEGetSpatialDimension(fe, &dim));
      // NOTE(review): passing CeedScalar** where Create1DTabulation_Tensor takes PetscReal** —
      // assumes CeedScalar == PetscReal in this build configuration; confirm
      PetscCall(Create1DTabulation_Tensor(comm, fe, &basis_tabulation, &q_points_1d, &q_weights_1d));
      num_comp = basis_tabulation->Nc;
      P_1d     = basis_tabulation->Nb / num_comp;
      Q_1d     = basis_tabulation->Np;
      PetscCall(ComputeFieldTabulationP2C(dm, dm_field, 0, basis_tabulation, &interp_1d, &grad_1d));
      PetscCallCeed(ceed, CeedBasisCreateTensorH1(ceed, dim, num_comp, P_1d, Q_1d, interp_1d, grad_1d, q_points_1d, q_weights_1d, basis));

      // Cleanup (this tabulation was created above, so it is destroyed here)
      PetscCall(PetscFree(q_points_1d));
      PetscCall(PetscFree(q_weights_1d));
      PetscCall(PetscFree(interp_1d));
      PetscCall(PetscFree(grad_1d));
      PetscCall(PetscTabulationDestroy(&basis_tabulation));
    }
    // Set in container so subsequent calls with the same key reuse this basis
    PetscCallCeed(ceed, CeedBasisReferenceCopy(*basis, &container_basis));
    PetscCall(PetscObjectContainerCompose((PetscObject)dm, container_name, container_basis, (PetscCtxDestroyFn *)DMPlexCeedBasisDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
863 
864 /**
865   @brief Create `CeedBasis` for coordinate space of `DMPlex`.
866 
867  The `CeedBasis` is stored in the coordinate `DMPlex` object for reuse;
868  if the routine is called again with the same arguments, the same `CeedBasis` is returned.
869 
870   Collective across MPI processes.
871 
872   @param[in]   ceed          `Ceed` context
873   @param[in]   dm            `DMPlex` holding mesh
874   @param[in]   domain_label  `DMLabel` for `DMPlex` domain
875   @param[in]   label_value   Stratum value
876   @param[in]   height        Height of `DMPlex` topology
877   @param[out]  basis         `CeedBasis` for coordinate space of `DMPlex`
878 **/
DMPlexCeedBasisCoordinateCreate(Ceed ceed,DM dm,DMLabel domain_label,PetscInt label_value,PetscInt height,CeedBasis * basis)879 PetscErrorCode DMPlexCeedBasisCoordinateCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, CeedBasis *basis) {
880   DM dm_coord = NULL;
881 
882   PetscFunctionBeginUser;
883   PetscCall(DMGetCoordinateDM(dm, &dm_coord));
884   PetscCall(DMPlexCeedBasisCreate(ceed, dm_coord, domain_label, label_value, height, 0, basis));
885   PetscFunctionReturn(PETSC_SUCCESS);
886 }
887 
// Pairing of a `CeedVector` copy of a `Vec`'s data with the `Vec`'s object state
// at copy time, so the copy can be invalidated when the `Vec` changes.
typedef struct CeedVectorAndState_ *CeedVectorAndState;
struct CeedVectorAndState_ {
  CeedVector       vector;  //!< `CeedVector` containing copy of data from a `Vec`
  PetscObjectState state;   //!< Object state of the `Vec` when the data was copied
};
893 
894 /**
895   @brief Destroy `CeedVectorAndState` in a `PetscContainer`.
896 
897   Not collective across MPI processes.
898 
899   @param[in,out]  ctx  `CeedVectorAndState`
900 
901   @return An error code: 0 - success, otherwise - failure
902 **/
CeedVectorAndStateDestroy(void ** ctx)903 static PetscErrorCode CeedVectorAndStateDestroy(void **ctx) {
904   CeedVectorAndState vec_and_state = *(CeedVectorAndState *)ctx;
905 
906   PetscFunctionBegin;
907   PetscCallCeed(CeedVectorReturnCeed(vec_and_state->vector), CeedVectorDestroy(&vec_and_state->vector));
908   PetscCall(PetscFree(vec_and_state));
909   *ctx = NULL;
910   PetscFunctionReturn(PETSC_SUCCESS);
911 }
912 
913 /**
914   @brief Get Ceed objects for coordinate field of DM
915 
916   @param[in]  ceed         `Ceed` context
917   @param[in]  dm           `DMPlex` holding mesh
918   @param[in]  domain_label `DMLabel` for `DMPlex` domain
919   @param[in]  label_value  Stratum value
920   @param[in]  height       Height of `DMPlex` topology
  @param[out] elem_restr   `CeedElemRestriction` for coordinates of `DMPlex`, or `NULL`
922   @param[out] basis        `CeedBasis` for coordinate space of `DMPlex`, or `NULL`
923   @param[out] vector       `CeedVector` for coordinates of `DMPlex`, or `NULL`
924 **/
DMPlexCeedCoordinateCreateField(Ceed ceed,DM dm,DMLabel domain_label,PetscInt label_value,PetscInt height,CeedElemRestriction * elem_restr,CeedBasis * basis,CeedVector * vector)925 PetscErrorCode DMPlexCeedCoordinateCreateField(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height,
926                                                CeedElemRestriction *elem_restr, CeedBasis *basis, CeedVector *vector) {
927   CeedElemRestriction elem_restr_ = NULL;
928 
929   PetscFunctionBeginUser;
930   PetscCall(DMPlexCeedElemRestrictionCoordinateCreate(ceed, dm, domain_label, label_value, height, &elem_restr_));
931   if (elem_restr) {
932     *elem_restr = NULL;
933     PetscCallCeed(ceed, CeedElemRestrictionReferenceCopy(elem_restr_, elem_restr));
934   }
935   if (basis) PetscCall(DMPlexCeedBasisCoordinateCreate(ceed, dm, domain_label, label_value, height, basis));
936   if (vector) {
937     DM                 cdm;
938     char               container_name[] = "DM Coordinate CeedVectorAndState";
939     CeedVectorAndState vec_and_state;
940     Vec                X_loc;
941     PetscObjectState   X_loc_state;
942 
943     PetscCall(DMGetCellCoordinateDM(dm, &cdm));
944     if (cdm) {
945       PetscCall(DMGetCellCoordinatesLocal(dm, &X_loc));
946     } else {
947       PetscCall(DMGetCoordinatesLocal(dm, &X_loc));
948     }
949     PetscCall(VecGetState(X_loc, &X_loc_state));
950     PetscCall(PetscObjectContainerQuery((PetscObject)X_loc, container_name, (void **)&vec_and_state));
951 
952     if (vec_and_state && vec_and_state->state == X_loc_state) {
953       // Copy existing vector
954       *vector = NULL;
955       PetscCallCeed(ceed, CeedVectorReferenceCopy(vec_and_state->vector, vector));
956     } else {
957       PetscCall(CeedElemRestrictionCreateVector(elem_restr_, vector, NULL));
958       PetscCall(VecCopyPetscToCeed(X_loc, *vector));
959 
960       // Set in container
961       PetscCall(PetscNew(&vec_and_state));
962       PetscCallCeed(ceed, CeedVectorReferenceCopy(*vector, &vec_and_state->vector));
963       vec_and_state->state = X_loc_state;
964       PetscCall(PetscObjectContainerCompose((PetscObject)X_loc, container_name, vec_and_state, (PetscCtxDestroyFn *)CeedVectorAndStateDestroy));
965     }
966   }
967   PetscCallCeed(ceed, CeedElemRestrictionDestroy(&elem_restr_));
968   PetscFunctionReturn(PETSC_SUCCESS);
969 }
970 
971 /**
972   @brief Create `CeedBasis` for cell to face quadrature space evaluation from `DMPlex`.
973 
974   Collective across MPI processes.
975 
976   @param[in]   ceed          `Ceed` context
977   @param[in]   dm            `DMPlex` holding mesh
978   @param[in]   domain_label  `DMLabel` for `DMPlex` domain
979   @param[in]   label_value   Stratum value
980   @param[in]   face          Index of face
981   @param[in]   dm_field      Index of `DMPlex` field
982   @param[out]  basis         `CeedBasis` for cell to face evaluation
983 **/
PetscErrorCode DMPlexCeedBasisCellToFaceCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt face, PetscInt dm_field,
                                               CeedBasis *basis) {
  PetscInt        ds_field = -1, height = 0;  // height 0: the basis lives on cells, evaluated at face quadrature points
  PetscDS         ds;
  PetscFE         fe;
  PetscQuadrature face_quadrature;

  PetscFunctionBeginUser;
  // Get element information
  PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL));
  PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field));
  PetscCall(PetscDSGetDiscretization(ds, ds_field, (PetscObject *)&fe));
  PetscCall(PetscFEGetFaceQuadrature(fe, &face_quadrature));

  {  // Build libCEED basis
    PetscInt         num_derivatives = 1, num_comp, P, Q = -1;
    PetscTabulation  basis_tabulation;
    CeedScalar      *q_points, *q_weights, *interp, *grad;
    CeedElemTopology elem_topo;

    {  // Verify compatible element topology
      DMPolytopeType cell_type;

      PetscCall(GetGlobalDMPlexPolytopeType(dm, domain_label, label_value, height, &cell_type));
      elem_topo = PolytopeTypePetscToCeed(cell_type);
      PetscCheck(elem_topo, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "DMPlex topology not supported: %s", DMPolytopeTypes[cell_type]);
    }

    // Compute basis tabulation at the face quadrature points
    PetscCall(GetQuadratureDataP2C(fe, face_quadrature, NULL, NULL, &Q, &q_points, &q_weights));
    // Face tabulation is borrowed from the PetscFE, so it is not destroyed here
    PetscCall(PetscFEGetFaceTabulation(fe, num_derivatives, &basis_tabulation));
    num_comp = basis_tabulation->Nc;
    P        = basis_tabulation->Nb / num_comp;  // Nodes per component
    PetscCall(ComputeFieldTabulationP2C(dm, dm_field, face, basis_tabulation, &interp, &grad));
    PetscCallCeed(ceed, CeedBasisCreateH1(ceed, elem_topo, num_comp, P, Q, interp, grad, q_points, q_weights, basis));

    // Release the buffers copied out by the helpers above
    PetscCall(PetscFree(q_points));
    PetscCall(PetscFree(q_weights));
    PetscCall(PetscFree(interp));
    PetscCall(PetscFree(grad));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1027 
1028 /**
1029   @brief Create `CeedBasis` for cell to face quadrature space evaluation on coordinate space of `DMPlex`.
1030 
1031   Collective across MPI processes.
1032 
1033   @param[in]   ceed          `Ceed` context
1034   @param[in]   dm            `DMPlex` holding mesh
1035   @param[in]   domain_label  `DMLabel` for `DMPlex` domain
1036   @param[in]   label_value   Stratum value
1037   @param[in]   face          Index of face
1038   @param[out]  basis         `CeedBasis` for cell to face evaluation on coordinate space of `DMPlex`
1039 **/
DMPlexCeedBasisCellToFaceCoordinateCreate(Ceed ceed,DM dm,DMLabel domain_label,PetscInt label_value,PetscInt face,CeedBasis * basis)1040 PetscErrorCode DMPlexCeedBasisCellToFaceCoordinateCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt face,
1041                                                          CeedBasis *basis) {
1042   DM dm_coord;
1043 
1044   PetscFunctionBeginUser;
1045   PetscCall(DMGetCellCoordinateDM(dm, &dm_coord));
1046   if (!dm_coord) {
1047     PetscCall(DMGetCoordinateDM(dm, &dm_coord));
1048   }
1049   PetscCall(DMPlexCeedBasisCellToFaceCreate(ceed, dm_coord, domain_label, label_value, face, 0, basis));
1050   PetscFunctionReturn(PETSC_SUCCESS);
1051 }
1052 
1053 /**
1054   @brief Setup `DM` with FE space of appropriate degree
1055 
1056   Must be followed by `DMSetupByOrderEnd_FEM`
1057 
1058   @param[in]   setup_faces     Flag to setup face geometry
1059   @param[in]   setup_coords    Flag to setup coordinate spaces
1060   @param[in]   degree          Polynomial orders of field
1061   @param[in]   coord_order     Polynomial order of coordinate basis, or `PETSC_DECIDE` for default
1062   @param[in]   q_extra         Additional quadrature order
1063   @param[in]   num_fields      Number of fields in solution vector
1064   @param[in]   field_sizes     Array of number of components for each field
1065   @param[out]  dm              `DM` to setup
1066 
1067   @return An error code: 0 - success, otherwise - failure
1068 **/
DMSetupByOrderBegin_FEM(PetscBool setup_faces,PetscBool setup_coords,PetscInt degree,PetscInt coord_order,PetscInt q_extra,PetscInt num_fields,const PetscInt * field_sizes,DM dm)1069 PetscErrorCode DMSetupByOrderBegin_FEM(PetscBool setup_faces, PetscBool setup_coords, PetscInt degree, PetscInt coord_order, PetscInt q_extra,
1070                                        PetscInt num_fields, const PetscInt *field_sizes, DM dm) {
1071   PetscInt  dim, q_order = degree + q_extra;
1072   PetscBool is_simplex = PETSC_TRUE;
1073   PetscFE   fe;
1074   MPI_Comm  comm = PetscObjectComm((PetscObject)dm);
1075 
1076   PetscFunctionBeginUser;
1077   PetscCall(DMPlexIsSimplex(dm, &is_simplex));
1078 
1079   // Setup DM
1080   PetscCall(DMGetDimension(dm, &dim));
1081   for (PetscInt i = 0; i < num_fields; i++) {
1082     PetscFE  fe_face;
1083     PetscInt q_order = degree + q_extra;
1084 
1085     PetscCall(PetscFECreateLagrange(comm, dim, field_sizes[i], is_simplex, degree, q_order, &fe));
1086     if (setup_faces) PetscCall(PetscFEGetHeightSubspace(fe, 1, &fe_face));
1087     PetscCall(DMAddField(dm, NULL, (PetscObject)fe));
1088     PetscCall(PetscFEDestroy(&fe));
1089   }
1090   PetscCall(DMCreateDS(dm));
1091 
1092   // Project coordinates to enrich quadrature space
1093   if (setup_coords) {
1094     DM             dm_coord;
1095     PetscDS        ds_coord;
1096     PetscFE        fe_coord_current, fe_coord_new, fe_coord_face_new;
1097     PetscDualSpace fe_coord_dual_space;
1098     PetscInt       fe_coord_order, num_comp_coord;
1099 
1100     PetscCall(DMGetCoordinateDM(dm, &dm_coord));
1101     PetscCall(DMGetCoordinateDim(dm, &num_comp_coord));
1102     PetscCall(DMGetRegionDS(dm_coord, NULL, NULL, &ds_coord, NULL));
1103     PetscCall(PetscDSGetDiscretization(ds_coord, 0, (PetscObject *)&fe_coord_current));
1104     PetscCall(PetscFEGetDualSpace(fe_coord_current, &fe_coord_dual_space));
1105     PetscCall(PetscDualSpaceGetOrder(fe_coord_dual_space, &fe_coord_order));
1106 
1107     // Create FE for coordinates
1108     if (coord_order != PETSC_DECIDE) fe_coord_order = coord_order;
1109     PetscCall(PetscFECreateLagrange(comm, dim, num_comp_coord, is_simplex, fe_coord_order, q_order, &fe_coord_new));
1110     if (setup_faces) PetscCall(PetscFEGetHeightSubspace(fe_coord_new, 1, &fe_coord_face_new));
1111     PetscCall(DMSetCoordinateDisc(dm, fe_coord_new, PETSC_FALSE, PETSC_TRUE));
1112     PetscCall(PetscFEDestroy(&fe_coord_new));
1113   }
1114   PetscFunctionReturn(PETSC_SUCCESS);
1115 }
1116 
1117 /**
1118   @brief Finish setting up `DM`
1119 
1120   Must be called after `DMSetupByOrderBegin_FEM` and all strong BCs have be declared via `DMAddBoundaries`.
1121 
1122   @param[in]   setup_coords    Flag to setup coordinate spaces
1123   @param[out]  dm              `DM` to setup
1124 
1125   @return An error code: 0 - success, otherwise - failure
1126 **/
DMSetupByOrderEnd_FEM(PetscBool setup_coords,DM dm)1127 PetscErrorCode DMSetupByOrderEnd_FEM(PetscBool setup_coords, DM dm) {
1128   PetscBool is_simplex;
1129 
1130   PetscFunctionBeginUser;
1131   PetscCall(DMPlexIsSimplex(dm, &is_simplex));
1132   // Set tensor permutation if needed
1133   if (!is_simplex) {
1134     PetscCall(DMPlexSetClosurePermutationTensor(dm, PETSC_DETERMINE, NULL));
1135     if (setup_coords) {
1136       DM dm_coord;
1137 
1138       PetscCall(DMGetCoordinateDM(dm, &dm_coord));
1139       PetscCall(DMPlexSetClosurePermutationTensor(dm_coord, PETSC_DETERMINE, NULL));
1140     }
1141   }
1142   PetscCall(DMLocalizeCoordinates(dm));  // Must localize after tensor closure setting
1143   PetscFunctionReturn(PETSC_SUCCESS);
1144 }
1145 
1146 /**
1147   @brief Setup `DM` with FE space of appropriate degree with no boundary conditions
1148 
1149   Calls `DMSetupByOrderBegin_FEM` and `DMSetupByOrderEnd_FEM` successively
1150 
1151   @param[in]   setup_faces     Flag to setup face geometry
1152   @param[in]   setup_coords    Flag to setup coordinate spaces
1153   @param[in]   degree          Polynomial orders of field
1154   @param[in]   coord_order     Polynomial order of coordinate basis, or `PETSC_DECIDE` for default
1155   @param[in]   q_extra         Additional quadrature order
1156   @param[in]   num_fields      Number of fields in solution vector
1157   @param[in]   field_sizes     Array of number of components for each field
1158   @param[out]  dm              `DM` to setup
1159 
1160   @return An error code: 0 - success, otherwise - failure
1161 **/
DMSetupByOrder_FEM(PetscBool setup_faces,PetscBool setup_coords,PetscInt degree,PetscInt coord_order,PetscInt q_extra,PetscInt num_fields,const PetscInt * field_sizes,DM dm)1162 PetscErrorCode DMSetupByOrder_FEM(PetscBool setup_faces, PetscBool setup_coords, PetscInt degree, PetscInt coord_order, PetscInt q_extra,
1163                                   PetscInt num_fields, const PetscInt *field_sizes, DM dm) {
1164   PetscFunctionBeginUser;
1165   PetscCall(DMSetupByOrderBegin_FEM(setup_faces, setup_coords, degree, coord_order, q_extra, num_fields, field_sizes, dm));
1166   PetscCall(DMSetupByOrderEnd_FEM(setup_coords, dm));
1167   PetscFunctionReturn(PETSC_SUCCESS);
1168 }
1169 
1170 /**
1171   @brief Get the points in a label stratum which have requested height
1172 
  For example, from the "Face Sets" label (which contains face, edge, and vertex points), return points that correspond to face points (height = 1) and have a label value of 3:
1174   ```
1175   PetscCall(DMGetStratumISAtHeight(dm, "Face Sets", 3, 1, &is_face_points));
1176   ```
1177 
1178   @param[in]  dm     DM which has the label
  @param[in]  name   Name of the label
1180   @param[in]  value  Label value of points to return
1181   @param[in]  height Height of points to return
  @param[out] points `IS` of points, or `NULL` if there are no points in that stratum at that height
1183 **/
DMGetStratumISAtHeight(DM dm,const char name[],PetscInt value,PetscInt height,IS * points)1184 static PetscErrorCode DMGetStratumISAtHeight(DM dm, const char name[], PetscInt value, PetscInt height, IS *points) {
1185   PetscInt depth;
1186   DMLabel  depth_label;
1187   IS       label_is, depth_is;
1188 
1189   PetscFunctionBeginUser;
1190   PetscCall(DMPlexGetDepth(dm, &depth));
1191   PetscCall(DMPlexGetDepthLabel(dm, &depth_label));
1192   PetscCall(DMLabelGetStratumIS(depth_label, depth - height, &depth_is));
1193   PetscCall(DMGetStratumIS(dm, name, value, &label_is));
1194   if (label_is == NULL || depth_is == NULL) *points = NULL;
1195   else PetscCall(ISIntersect(depth_is, label_is, points));
1196   PetscCall(ISDestroy(&depth_is));
1197   PetscCall(ISDestroy(&label_is));
1198   PetscFunctionReturn(PETSC_SUCCESS);
1199 }
1200 
1201 /**
1202   @brief Create a label with separate values for the `FE` orientations for all cells with this material for the `DMPlex` face.
1203 
1204   Collective across MPI processes.
1205 
1206   @param[in,out]  dm                `DMPlex` holding mesh
1207   @param[in]      dm_face           Face number on `DMPlex`
  @param[out]     face_label_name   Name of the newly created (or already existing) label; allocated with `PetscCalloc1` and must be freed by the caller
1209 **/
DMPlexCreateFaceLabel(DM dm,PetscInt dm_face,char ** face_label_name)1210 PetscErrorCode DMPlexCreateFaceLabel(DM dm, PetscInt dm_face, char **face_label_name) {
1211   PetscInt        num_face_points = 0, face_height = 1;
1212   const PetscInt *face_points;
1213   IS              is_face_points;
1214   DMLabel         face_label = NULL;
1215   MPI_Comm        comm       = PetscObjectComm((PetscObject)dm);
1216 
1217   PetscFunctionBeginUser;
1218   {  // Create label name and label
1219     PetscSizeT label_name_len = 64;
1220     PetscBool  has_label_already;
1221 
1222     // Face label
1223     PetscCall(PetscCalloc1(label_name_len, face_label_name));
1224     PetscCall(PetscSNPrintf(*face_label_name, label_name_len, "Orientation for DM face %" PetscInt_FMT, dm_face));
1225     PetscCall(DMHasLabel(dm, *face_label_name, &has_label_already));
1226     if (has_label_already) PetscFunctionReturn(PETSC_SUCCESS);
1227 
1228     PetscCall(DMCreateLabel(dm, *face_label_name));
1229     PetscCall(DMGetLabel(dm, *face_label_name, &face_label));
1230   }
1231 
1232   PetscCall(DMGetStratumISAtHeight(dm, "Face Sets", dm_face, face_height, &is_face_points));
1233   if (is_face_points) {
1234     PetscCall(ISGetLocalSize(is_face_points, &num_face_points));
1235     PetscCall(ISGetIndices(is_face_points, &face_points));
1236   }
1237 
1238   for (PetscInt face_index = 0; face_index < num_face_points; face_index++) {
1239     const PetscInt *face_support, *cell_cone;
1240     PetscInt        face_support_size = 0, cell_cone_size = 0, fe_face_on_cell = -1;
1241     PetscInt        face_point = face_points[face_index];
1242 
1243     // Get cell supporting face
1244     PetscCall(DMPlexGetSupport(dm, face_point, &face_support));
1245     PetscCall(DMPlexGetSupportSize(dm, face_point, &face_support_size));
1246     PetscCheck(face_support_size == 1, comm, PETSC_ERR_SUP, "Expected one cell in support of exterior face, but got %" PetscInt_FMT " cells",
1247                face_support_size);
1248     PetscInt cell_point = face_support[0];
1249 
1250     // Find face number
1251     PetscCall(DMPlexGetCone(dm, cell_point, &cell_cone));
1252     PetscCall(DMPlexGetConeSize(dm, cell_point, &cell_cone_size));
1253     for (PetscInt i = 0; i < cell_cone_size; i++) {
1254       if (cell_cone[i] == face_point) fe_face_on_cell = i;
1255     }
1256     PetscCheck(fe_face_on_cell != -1, comm, PETSC_ERR_SUP, "Could not find face %" PetscInt_FMT " in cone of its support", face_point);
1257 
1258     // Add to label
1259     PetscCall(DMLabelSetValue(face_label, face_point, fe_face_on_cell));
1260   }
1261   PetscCall(DMPlexLabelAddFaceCells(dm, face_label));
1262   PetscCall(ISDestroy(&is_face_points));
1263   PetscFunctionReturn(PETSC_SUCCESS);
1264 }
1265 
1266 /**
1267    @brief Creates an array of all the values set for a `DMLabel`
1268 
1269    Because `DMLabelGetValueIS` returns label values locally, not globally.
1270 
1271    Collective across MPI processes.
1272 
1273    @param[in]  dm          `DM` with the label
1274    @param[in]  label       `DMLabel` to get values of
1275    @param[out] num_values  Total number of values
1276    @param[out] value_array Array of label values, must be freed by user
1277 **/
DMLabelCreateGlobalValueArray(DM dm,DMLabel label,PetscInt * num_values,PetscInt ** value_array)1278 PetscErrorCode DMLabelCreateGlobalValueArray(DM dm, DMLabel label, PetscInt *num_values, PetscInt **value_array) {
1279   PetscInt       num_values_local, minmax_values[2], minmax_values_loc[2];
1280   PetscInt      *values_local = NULL;
1281   MPI_Comm       comm         = PetscObjectComm((PetscObject)dm);
1282   PetscSegBuffer seg;
1283   PetscCount     seg_size;
1284 
1285   PetscFunctionBeginUser;
1286   {  // Extract array of local DMLabel values so it can be sorted
1287     IS              is_values;
1288     const PetscInt *is_values_local = NULL;
1289 
1290     PetscCall(DMLabelGetValueIS(label, &is_values));
1291     PetscCall(ISGetLocalSize(is_values, &num_values_local));
1292 
1293     PetscCall(ISGetIndices(is_values, &is_values_local));
1294     PetscCall(PetscMalloc1(num_values_local, &values_local));
1295     PetscCall(PetscArraycpy(values_local, is_values_local, num_values_local));
1296     PetscCall(ISRestoreIndices(is_values, &is_values_local));
1297     PetscCall(ISDestroy(&is_values));
1298   }
1299 
1300   if (num_values_local) {
1301     PetscCall(PetscSortInt(num_values_local, values_local));
1302     minmax_values_loc[0] = values_local[0];
1303     minmax_values_loc[1] = values_local[num_values_local - 1];
1304   } else {
1305     // Rank has no set label values, therefore set local min/max to have no effect on global min/max
1306     minmax_values_loc[0] = PETSC_INT_MAX;
1307     minmax_values_loc[1] = PETSC_INT_MIN;
1308   }
1309   PetscCall(PetscGlobalMinMaxInt(comm, minmax_values_loc, minmax_values));
1310 
1311   PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 16, &seg));
1312   for (PetscInt value = minmax_values[0]; value <= minmax_values[1]; value++) {
1313     PetscInt location_local = -1, location_global = -1;
1314 
1315     PetscCall(PetscFindInt(value, num_values_local, values_local, &location_local));
1316     PetscCallMPI(MPIU_Allreduce(&location_local, &location_global, 1, MPIU_INT, MPI_MAX, comm));
1317     if (location_global < 0) continue;
1318     else {
1319       PetscInt *seg_slot;
1320 
1321       PetscCall(PetscSegBufferGetInts(seg, 1, &seg_slot));
1322       *seg_slot = value;
1323     }
1324   }
1325   PetscCall(PetscSegBufferGetSize(seg, &seg_size));
1326   *num_values = (PetscInt)seg_size;
1327   PetscCall(PetscSegBufferExtractAlloc(seg, value_array));
1328   PetscCall(PetscSegBufferDestroy(&seg));
1329   if (values_local) PetscCall(PetscFree(values_local));
1330   PetscFunctionReturn(PETSC_SUCCESS);
1331 }
1332 
1333 /**
1334   @brief Get the number of components for `dm`'s coordinate field
1335 
1336   @param[in]  dm       DM
1337   @param[out] num_comp Number of components for the coordinate field
1338 **/
DMGetCoordinateNumComps(DM dm,PetscInt * num_comp)1339 PetscErrorCode DMGetCoordinateNumComps(DM dm, PetscInt *num_comp) {
1340   DM           dm_coord;
1341   PetscSection section_coord;
1342   PetscInt     field = 0;  // Default field has the coordinates
1343 
1344   PetscFunctionBeginUser;
1345   PetscCall(DMGetCoordinateDM(dm, &dm_coord));
1346   PetscCall(DMGetLocalSection(dm_coord, &section_coord));
1347   PetscCall(PetscSectionGetFieldComponents(section_coord, field, num_comp));
1348   PetscFunctionReturn(PETSC_SUCCESS);
1349 }
1350 
1351 /**
1352   @brief Get the number of components for `dm`'s field
1353 
1354   @param[in]  dm       DM
1355   @param[in]  field    The field ID to get the number of components for
1356   @param[out] num_comp Number of components for the coordinate field
1357 **/
DMGetFieldNumComps(DM dm,PetscInt field,PetscInt * num_comp)1358 PetscErrorCode DMGetFieldNumComps(DM dm, PetscInt field, PetscInt *num_comp) {
1359   PetscSection section;
1360 
1361   PetscFunctionBeginUser;
1362   PetscCall(DMGetLocalSection(dm, &section));
1363   PetscCall(PetscSectionGetFieldComponents(section, field, num_comp));
1364   PetscFunctionReturn(PETSC_SUCCESS);
1365 }
1366