xref: /honee/include/petsc-ceed-utils.h (revision ae2b091fac884a554e48acc4b4c187524c2a2818)
1 // SPDX-FileCopyrightText: Copyright (c) 2017-2024, HONEE contributors.
2 // SPDX-License-Identifier: Apache-2.0 OR BSD-2-Clause
3 #pragma once
4 
5 #include <ceed.h>
6 #include <petscdm.h>
7 
/**
  @brief Copy the reference to a `Vec`.
         Note: If `*vec_copy` holds an existing `Vec`, it is released via `VecDestroy()` before being overwritten.

  Collective across MPI processes.

  @param[in]   vec       `Vec` to reference
  @param[out]  vec_copy  Copy of reference

  @return An error code: 0 - success, otherwise - failure
**/
static inline PetscErrorCode VecReferenceCopy(Vec vec, Vec *vec_copy) {
  PetscFunctionBeginUser;
  // Take the new reference BEFORE destroying the old one so the self-assignment
  // case (*vec_copy == vec) cannot drop the last reference to the object
  PetscCall(PetscObjectReference((PetscObject)vec));
  PetscCall(VecDestroy(vec_copy));
  *vec_copy = vec;
  PetscFunctionReturn(PETSC_SUCCESS);
}
26 
/**
  @brief Copy the reference to a `DM`.
         Note: If `*dm_copy` holds an existing `DM`, it is released via `DMDestroy()` before being overwritten.

  Collective across MPI processes.

  @param[in]   dm       `DM` to reference
  @param[out]  dm_copy  Copy of reference

  @return An error code: 0 - success, otherwise - failure
**/
static inline PetscErrorCode DMReferenceCopy(DM dm, DM *dm_copy) {
  PetscFunctionBeginUser;
  // Take the new reference BEFORE destroying the old one so the self-assignment
  // case (*dm_copy == dm) cannot drop the last reference to the object
  PetscCall(PetscObjectReference((PetscObject)dm));
  PetscCall(DMDestroy(dm_copy));
  *dm_copy = dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}
45 
46 /**
47   @brief Translate PetscMemType to CeedMemType
48 
49   @param[in]  mem_type  PetscMemType
50 
51   @return Equivalent CeedMemType
52 **/
53 static inline CeedMemType MemTypePetscToCeed(PetscMemType mem_type) { return PetscMemTypeDevice(mem_type) ? CEED_MEM_DEVICE : CEED_MEM_HOST; }
54 
55 /**
56   @brief Translate array of `PetscInt` to `CeedInt`.
57     If the types differ, `array_petsc` is freed with `PetscFree()` and `array_ceed` is allocated with `PetscMalloc1()`.
58     Caller is responsible for freeing `array_ceed` with `PetscFree()`.
59 
60   Not collective across MPI processes.
61 
62   @param[in]      num_entries  Number of array entries
63   @param[in,out]  array_petsc  Array of `PetscInt`
64   @param[out]     array_ceed   Array of `CeedInt`
65 
66   @return An error code: 0 - success, otherwise - failure
67 **/
68 static inline PetscErrorCode IntArrayCeedToPetsc(PetscInt num_entries, CeedInt **array_ceed, PetscInt **array_petsc) {
69   const CeedInt  int_c = 0;
70   const PetscInt int_p = 0;
71 
72   PetscFunctionBeginUser;
73   if (sizeof(int_c) == sizeof(int_p)) {
74     *array_petsc = (PetscInt *)*array_ceed;
75   } else {
76     *array_petsc = malloc(num_entries * sizeof(PetscInt));
77     for (PetscInt i = 0; i < num_entries; i++) (*array_petsc)[i] = (*array_ceed)[i];
78     free(*array_ceed);
79   }
80   *array_ceed = NULL;
81   PetscFunctionReturn(PETSC_SUCCESS);
82 }
83 
84 /**
85   @brief Translate array of `PetscInt` to `CeedInt`.
86     If the types differ, `array_petsc` is freed with `PetscFree()` and `array_ceed` is allocated with `PetscMalloc1()`.
87     Caller is responsible for freeing `array_ceed` with `PetscFree()`.
88 
89   Not collective across MPI processes.
90 
91   @param[in]      num_entries  Number of array entries
92   @param[in,out]  array_petsc  Array of `PetscInt`
93   @param[out]     array_ceed   Array of `CeedInt`
94 
95   @return An error code: 0 - success, otherwise - failure
96 **/
97 static inline PetscErrorCode IntArrayPetscToCeed(PetscInt num_entries, PetscInt **array_petsc, CeedInt **array_ceed) {
98   const CeedInt  int_c = 0;
99   const PetscInt int_p = 0;
100 
101   PetscFunctionBeginUser;
102   if (sizeof(int_c) == sizeof(int_p)) {
103     *array_ceed = (CeedInt *)*array_petsc;
104   } else {
105     PetscCall(PetscMalloc1(num_entries, array_ceed));
106     for (PetscInt i = 0; i < num_entries; i++) (*array_ceed)[i] = (*array_petsc)[i];
107     PetscCall(PetscFree(*array_petsc));
108   }
109   *array_petsc = NULL;
110   PetscFunctionReturn(PETSC_SUCCESS);
111 }
112 
113 /**
114   @brief Transfer array from PETSc `Vec` to `CeedVector`.
115 
116   Collective across MPI processes.
117 
118   @param[in]   X_petsc   PETSc `Vec`
119   @param[out]  mem_type  PETSc `MemType`
120   @param[out]  x_ceed    `CeedVector`
121 
122   @return An error code: 0 - success, otherwise - failure
123 **/
124 static inline PetscErrorCode VecPetscToCeed(Vec X_petsc, PetscMemType *mem_type, CeedVector x_ceed) {
125   PetscScalar *x;
126 
127   PetscFunctionBeginUser;
128   PetscCall(VecGetArrayAndMemType(X_petsc, &x, mem_type));
129   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorSetArray(x_ceed, MemTypePetscToCeed(*mem_type), CEED_USE_POINTER, x));
130   PetscFunctionReturn(PETSC_SUCCESS);
131 }
132 
133 /**
134   @brief Transfer array from `CeedVector` to PETSc `Vec`.
135 
136   Collective across MPI processes.
137 
138   @param[in]   x_ceed    `CeedVector`
139   @param[in]   mem_type  PETSc `MemType`
140   @param[out]  X_petsc   PETSc `Vec`
141 
142   @return An error code: 0 - success, otherwise - failure
143 **/
144 static inline PetscErrorCode VecCeedToPetsc(CeedVector x_ceed, PetscMemType mem_type, Vec X_petsc) {
145   PetscScalar *x;
146 
147   PetscFunctionBeginUser;
148   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorTakeArray(x_ceed, MemTypePetscToCeed(mem_type), &x));
149   PetscCall(VecRestoreArrayAndMemType(X_petsc, &x));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 /**
154   @brief Transfer read only array from PETSc `Vec` to `CeedVector`.
155 
156   Collective across MPI processes.
157 
158   @param[in]   X_petsc   PETSc `Vec`
159   @param[out]  mem_type  PETSc `MemType`
160   @param[out]  x_ceed    `CeedVector`
161 
162   @return An error code: 0 - success, otherwise - failure
163 **/
164 static inline PetscErrorCode VecReadPetscToCeed(Vec X_petsc, PetscMemType *mem_type, CeedVector x_ceed) {
165   PetscScalar *x;
166 
167   PetscFunctionBeginUser;
168   PetscCall(VecGetArrayReadAndMemType(X_petsc, (const PetscScalar **)&x, mem_type));
169   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorSetArray(x_ceed, MemTypePetscToCeed(*mem_type), CEED_USE_POINTER, x));
170   PetscFunctionReturn(PETSC_SUCCESS);
171 }
172 
173 /**
174   @brief Transfer read only array from `CeedVector` to PETSc `Vec`.
175 
176   Collective across MPI processes.
177 
178   @param[in]   x_ceed    `CeedVector`
179   @param[in]   mem_type  PETSc `MemType`
180   @param[out]  X_petsc   PETSc `Vec`
181 
182   @return An error code: 0 - success, otherwise - failure
183 **/
184 static inline PetscErrorCode VecReadCeedToPetsc(CeedVector x_ceed, PetscMemType mem_type, Vec X_petsc) {
185   PetscScalar *x;
186 
187   PetscFunctionBeginUser;
188   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorTakeArray(x_ceed, MemTypePetscToCeed(mem_type), &x));
189   PetscCall(VecRestoreArrayReadAndMemType(X_petsc, (const PetscScalar **)&x));
190   PetscFunctionReturn(PETSC_SUCCESS);
191 }
192 
193 /**
194   @brief Copy PETSc `Vec` data into `CeedVector`
195 
196   @param[in]   X_petsc PETSc `Vec`
197   @param[out]  x_ceed  `CeedVector`
198 
199   @return An error code: 0 - success, otherwise - failure
200 **/
201 static inline PetscErrorCode VecCopyPetscToCeed(Vec X_petsc, CeedVector x_ceed) {
202   PetscScalar *x;
203   PetscMemType mem_type;
204   PetscInt     X_size;
205   CeedSize     x_size;
206   Ceed         ceed;
207 
208   PetscFunctionBeginUser;
209   PetscCall(CeedVectorGetCeed(x_ceed, &ceed));
210   PetscCall(VecGetLocalSize(X_petsc, &X_size));
211   PetscCallCeed(ceed, CeedVectorGetLength(x_ceed, &x_size));
212   PetscCheck(X_size == x_size, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "X_petsc (%" PetscInt_FMT ") and x_ceed (%" CeedSize_FMT ") must be same size",
213              X_size, x_size);
214 
215   PetscCall(VecGetArrayReadAndMemType(X_petsc, (const PetscScalar **)&x, &mem_type));
216   PetscCallCeed(ceed, CeedVectorSetArray(x_ceed, MemTypePetscToCeed(mem_type), CEED_COPY_VALUES, x));
217   PetscCall(VecRestoreArrayReadAndMemType(X_petsc, (const PetscScalar **)&x));
218   PetscFunctionReturn(PETSC_SUCCESS);
219 }
220 
/**
  @brief Return the quadrature size from the eval mode, dimension, and number of components

  @param[in]  eval_mode       The basis evaluation mode
  @param[in]  dim             The basis dimension
  @param[in]  num_components  The basis number of components

  @return The number of values per quadrature point, or -1 if the eval mode is not supported

**/
static inline CeedInt GetCeedQuadratureSize(CeedEvalMode eval_mode, CeedInt dim, CeedInt num_components) {
  switch (eval_mode) {
    case CEED_EVAL_INTERP:
      return num_components;
    case CEED_EVAL_GRAD:
      // Gradient has one value per component per spatial dimension
      return dim * num_components;
    default:
      return -1;
  }
}
241 
242 /**
243   @brief Convert from DMPolytopeType to CeedElemTopology
244 
245   @param[in]  cell_type  DMPolytopeType for the cell
246 
247   @return CeedElemTopology, or 0 if no equivelent CeedElemTopology was found
248 **/
249 static inline CeedElemTopology PolytopeTypePetscToCeed(DMPolytopeType cell_type) {
250   switch (cell_type) {
251     case DM_POLYTOPE_TRIANGLE:
252       return CEED_TOPOLOGY_TRIANGLE;
253     case DM_POLYTOPE_QUADRILATERAL:
254       return CEED_TOPOLOGY_QUAD;
255     case DM_POLYTOPE_TETRAHEDRON:
256       return CEED_TOPOLOGY_TET;
257     case DM_POLYTOPE_HEXAHEDRON:
258       return CEED_TOPOLOGY_HEX;
259     default:
260       return 0;
261   }
262 }
263