xref: /honee/include/petsc-ceed-utils.h (revision 965d9f743f86785935177ec2ff1624a7b09e04f8)
1 // Copyright (c) 2017-2024, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 #pragma once
8 
9 #include <ceed.h>
10 #include <petscdm.h>
11 
/**
  @brief Copy the reference to a `Vec`.
         Note: If `vec_copy` is non-null, it is assumed to be a valid pointer to a `Vec` and `VecDestroy()` will be called.

  Collective across MPI processes.

  @param[in]   vec       `Vec` to reference
  @param[out]  vec_copy  Copy of reference

  @return An error code: 0 - success, otherwise - failure
**/
static inline PetscErrorCode VecReferenceCopy(Vec vec, Vec *vec_copy) {
  PetscFunctionBeginUser;
  // Take the new reference before destroying the old one so this is safe when vec == *vec_copy
  PetscCall(PetscObjectReference((PetscObject)vec));
  PetscCall(VecDestroy(vec_copy));
  *vec_copy = vec;
  PetscFunctionReturn(PETSC_SUCCESS);
}
30 
/**
  @brief Copy the reference to a `DM`.
         Note: If `dm_copy` is non-null, it is assumed to be a valid pointer to a `DM` and `DMDestroy()` will be called.

  Collective across MPI processes.

  @param[in]   dm       `DM` to reference
  @param[out]  dm_copy  Copy of reference

  @return An error code: 0 - success, otherwise - failure
**/
static inline PetscErrorCode DMReferenceCopy(DM dm, DM *dm_copy) {
  PetscFunctionBeginUser;
  // Take the new reference before destroying the old one so this is safe when dm == *dm_copy
  PetscCall(PetscObjectReference((PetscObject)dm));
  PetscCall(DMDestroy(dm_copy));
  *dm_copy = dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}
49 
50 /**
51   @brief Translate PetscMemType to CeedMemType
52 
53   @param[in]  mem_type  PetscMemType
54 
55   @return Equivalent CeedMemType
56 **/
57 static inline CeedMemType MemTypePetscToCeed(PetscMemType mem_type) { return PetscMemTypeDevice(mem_type) ? CEED_MEM_DEVICE : CEED_MEM_HOST; }
58 
59 /**
60   @brief Translate array of `PetscInt` to `CeedInt`.
61     If the types differ, `array_petsc` is freed with `PetscFree()` and `array_ceed` is allocated with `PetscMalloc1()`.
62     Caller is responsible for freeing `array_ceed` with `PetscFree()`.
63 
64   Not collective across MPI processes.
65 
66   @param[in]      num_entries  Number of array entries
67   @param[in,out]  array_petsc  Array of `PetscInt`
68   @param[out]     array_ceed   Array of `CeedInt`
69 
70   @return An error code: 0 - success, otherwise - failure
71 **/
72 static inline PetscErrorCode IntArrayCeedToPetsc(PetscInt num_entries, CeedInt **array_ceed, PetscInt **array_petsc) {
73   const CeedInt  int_c = 0;
74   const PetscInt int_p = 0;
75 
76   PetscFunctionBeginUser;
77   if (sizeof(int_c) == sizeof(int_p)) {
78     *array_petsc = (PetscInt *)*array_ceed;
79   } else {
80     *array_petsc = malloc(num_entries * sizeof(PetscInt));
81     for (PetscInt i = 0; i < num_entries; i++) (*array_petsc)[i] = (*array_ceed)[i];
82     free(*array_ceed);
83   }
84   *array_ceed = NULL;
85   PetscFunctionReturn(PETSC_SUCCESS);
86 }
87 
88 /**
89   @brief Translate array of `PetscInt` to `CeedInt`.
90     If the types differ, `array_petsc` is freed with `PetscFree()` and `array_ceed` is allocated with `PetscMalloc1()`.
91     Caller is responsible for freeing `array_ceed` with `PetscFree()`.
92 
93   Not collective across MPI processes.
94 
95   @param[in]      num_entries  Number of array entries
96   @param[in,out]  array_petsc  Array of `PetscInt`
97   @param[out]     array_ceed   Array of `CeedInt`
98 
99   @return An error code: 0 - success, otherwise - failure
100 **/
101 static inline PetscErrorCode IntArrayPetscToCeed(PetscInt num_entries, PetscInt **array_petsc, CeedInt **array_ceed) {
102   const CeedInt  int_c = 0;
103   const PetscInt int_p = 0;
104 
105   PetscFunctionBeginUser;
106   if (sizeof(int_c) == sizeof(int_p)) {
107     *array_ceed = (CeedInt *)*array_petsc;
108   } else {
109     PetscCall(PetscMalloc1(num_entries, array_ceed));
110     for (PetscInt i = 0; i < num_entries; i++) (*array_ceed)[i] = (*array_petsc)[i];
111     PetscCall(PetscFree(*array_petsc));
112   }
113   *array_petsc = NULL;
114   PetscFunctionReturn(PETSC_SUCCESS);
115 }
116 
117 /**
118   @brief Transfer array from PETSc `Vec` to `CeedVector`.
119 
120   Collective across MPI processes.
121 
122   @param[in]   X_petsc   PETSc `Vec`
123   @param[out]  mem_type  PETSc `MemType`
124   @param[out]  x_ceed    `CeedVector`
125 
126   @return An error code: 0 - success, otherwise - failure
127 **/
128 static inline PetscErrorCode VecPetscToCeed(Vec X_petsc, PetscMemType *mem_type, CeedVector x_ceed) {
129   PetscScalar *x;
130 
131   PetscFunctionBeginUser;
132   PetscCall(VecGetArrayAndMemType(X_petsc, &x, mem_type));
133   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorSetArray(x_ceed, MemTypePetscToCeed(*mem_type), CEED_USE_POINTER, x));
134   PetscFunctionReturn(PETSC_SUCCESS);
135 }
136 
137 /**
138   @brief Transfer array from `CeedVector` to PETSc `Vec`.
139 
140   Collective across MPI processes.
141 
142   @param[in]   x_ceed    `CeedVector`
143   @param[in]   mem_type  PETSc `MemType`
144   @param[out]  X_petsc   PETSc `Vec`
145 
146   @return An error code: 0 - success, otherwise - failure
147 **/
148 static inline PetscErrorCode VecCeedToPetsc(CeedVector x_ceed, PetscMemType mem_type, Vec X_petsc) {
149   PetscScalar *x;
150 
151   PetscFunctionBeginUser;
152   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorTakeArray(x_ceed, MemTypePetscToCeed(mem_type), &x));
153   PetscCall(VecRestoreArrayAndMemType(X_petsc, &x));
154   PetscFunctionReturn(PETSC_SUCCESS);
155 }
156 
157 /**
158   @brief Transfer read only array from PETSc `Vec` to `CeedVector`.
159 
160   Collective across MPI processes.
161 
162   @param[in]   X_petsc   PETSc `Vec`
163   @param[out]  mem_type  PETSc `MemType`
164   @param[out]  x_ceed    `CeedVector`
165 
166   @return An error code: 0 - success, otherwise - failure
167 **/
168 static inline PetscErrorCode VecReadPetscToCeed(Vec X_petsc, PetscMemType *mem_type, CeedVector x_ceed) {
169   PetscScalar *x;
170 
171   PetscFunctionBeginUser;
172   PetscCall(VecGetArrayReadAndMemType(X_petsc, (const PetscScalar **)&x, mem_type));
173   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorSetArray(x_ceed, MemTypePetscToCeed(*mem_type), CEED_USE_POINTER, x));
174   PetscFunctionReturn(PETSC_SUCCESS);
175 }
176 
177 /**
178   @brief Transfer read only array from `CeedVector` to PETSc `Vec`.
179 
180   Collective across MPI processes.
181 
182   @param[in]   x_ceed    `CeedVector`
183   @param[in]   mem_type  PETSc `MemType`
184   @param[out]  X_petsc   PETSc `Vec`
185 
186   @return An error code: 0 - success, otherwise - failure
187 **/
188 static inline PetscErrorCode VecReadCeedToPetsc(CeedVector x_ceed, PetscMemType mem_type, Vec X_petsc) {
189   PetscScalar *x;
190 
191   PetscFunctionBeginUser;
192   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorTakeArray(x_ceed, MemTypePetscToCeed(mem_type), &x));
193   PetscCall(VecRestoreArrayReadAndMemType(X_petsc, (const PetscScalar **)&x));
194   PetscFunctionReturn(PETSC_SUCCESS);
195 }
196 
197 /**
198   @brief Copy PETSc `Vec` data into `CeedVector`
199 
200   @param[in]   X_petsc PETSc `Vec`
201   @param[out]  x_ceed  `CeedVector`
202 
203   @return An error code: 0 - success, otherwise - failure
204 **/
205 static inline PetscErrorCode VecCopyPetscToCeed(Vec X_petsc, CeedVector x_ceed) {
206   PetscScalar *x;
207   PetscMemType mem_type;
208   PetscInt     X_size;
209   CeedSize     x_size;
210   Ceed         ceed;
211 
212   PetscFunctionBeginUser;
213   PetscCall(CeedVectorGetCeed(x_ceed, &ceed));
214   PetscCall(VecGetLocalSize(X_petsc, &X_size));
215   PetscCallCeed(ceed, CeedVectorGetLength(x_ceed, &x_size));
216   PetscCheck(X_size == x_size, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "X_petsc (%" PetscInt_FMT ") and x_ceed (%" CeedSize_FMT ") must be same size",
217              X_size, x_size);
218 
219   PetscCall(VecGetArrayReadAndMemType(X_petsc, (const PetscScalar **)&x, &mem_type));
220   PetscCallCeed(ceed, CeedVectorSetArray(x_ceed, MemTypePetscToCeed(mem_type), CEED_COPY_VALUES, x));
221   PetscCall(VecRestoreArrayReadAndMemType(X_petsc, (const PetscScalar **)&x));
222   PetscFunctionReturn(PETSC_SUCCESS);
223 }
224 
/**
  @brief Return the quadrature size from the eval mode, dimension, and number of components

  @param[in]  eval_mode       The basis evaluation mode
  @param[in]  dim             The basis dimension
  @param[in]  num_components  The basis number of components

  @return The quadrature point size for the evaluation mode, or -1 for unsupported evaluation modes
**/
static inline CeedInt GetCeedQuadratureSize(CeedEvalMode eval_mode, CeedInt dim, CeedInt num_components) {
  switch (eval_mode) {
    case CEED_EVAL_INTERP:
      return num_components;
    case CEED_EVAL_GRAD:
      // Gradients carry one value per spatial dimension for each component
      return dim * num_components;
    default:
      return -1;
  }
}
245 
246 /**
247   @brief Convert from DMPolytopeType to CeedElemTopology
248 
249   @param[in]  cell_type  DMPolytopeType for the cell
250 
251   @return CeedElemTopology, or 0 if no equivelent CeedElemTopology was found
252 **/
253 static inline CeedElemTopology PolytopeTypePetscToCeed(DMPolytopeType cell_type) {
254   switch (cell_type) {
255     case DM_POLYTOPE_TRIANGLE:
256       return CEED_TOPOLOGY_TRIANGLE;
257     case DM_POLYTOPE_QUADRILATERAL:
258       return CEED_TOPOLOGY_QUAD;
259     case DM_POLYTOPE_TETRAHEDRON:
260       return CEED_TOPOLOGY_TET;
261     case DM_POLYTOPE_HEXAHEDRON:
262       return CEED_TOPOLOGY_HEX;
263     default:
264       return 0;
265   }
266 }
267