xref: /libCEED/examples/fluids/include/petsc-ceed-utils.h (revision 6a5b1f8c8ca6bae6b6457d14f82c38963f859dea)
1 // Copyright (c) 2017-2024, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 #pragma once
8 
9 #include <ceed.h>
10 #include <petscdm.h>
11 
12 /**
13   @brief Copy the reference to a `Vec`.
14          Note: If `vec_copy` is non-null, it is assumed to be a valid pointer to a `Vec` and `VecDestroy()` will be called.
15 
16   Collective across MPI processes.
17 
18   @param[in]   vec       `Vec` to reference
19   @param[out]  vec_copy  Copy of reference
20 
21   @return An error code: 0 - success, otherwise - failure
22 **/
static inline PetscErrorCode VecReferenceCopy(Vec vec, Vec *vec_copy) {
  PetscFunctionBeginUser;
  // Bump the reference count first so this is safe even when vec == *vec_copy
  PetscCall(PetscObjectReference((PetscObject)vec));
  // Drop whatever Vec *vec_copy previously referenced, if any
  PetscCall(VecDestroy(vec_copy));
  *vec_copy = vec;
  PetscFunctionReturn(PETSC_SUCCESS);
}
30 
31 /**
32   @brief Copy the reference to a `DM`.
33          Note: If `dm_copy` is non-null, it is assumed to be a valid pointer to a `DM` and `DMDestroy()` will be called.
34 
35   Collective across MPI processes.
36 
37   @param[in]   dm       `DM` to reference
38   @param[out]  dm_copy  Copy of reference
39 
40   @return An error code: 0 - success, otherwise - failure
41 **/
static inline PetscErrorCode DMReferenceCopy(DM dm, DM *dm_copy) {
  PetscFunctionBeginUser;
  // Bump the reference count first so this is safe even when dm == *dm_copy
  PetscCall(PetscObjectReference((PetscObject)dm));
  // Drop whatever DM *dm_copy previously referenced, if any
  PetscCall(DMDestroy(dm_copy));
  *dm_copy = dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}
49 
50 /**
51   @brief Translate PetscMemType to CeedMemType
52 
53   @param[in]  mem_type  PetscMemType
54 
55   @return Equivalent CeedMemType
56 **/
57 /// @ingroup RatelInternal
58 static inline CeedMemType MemTypePetscToCeed(PetscMemType mem_type) { return PetscMemTypeDevice(mem_type) ? CEED_MEM_DEVICE : CEED_MEM_HOST; }
59 
60 /**
61   @brief Translate array of `PetscInt` to `CeedInt`.
62     If the types differ, `array_petsc` is freed with `PetscFree()` and `array_ceed` is allocated with `PetscMalloc1()`.
63     Caller is responsible for freeing `array_ceed` with `PetscFree()`.
64 
65   Not collective across MPI processes.
66 
67   @param[in]      num_entries  Number of array entries
68   @param[in,out]  array_petsc  Array of `PetscInt`
69   @param[out]     array_ceed   Array of `CeedInt`
70 
71   @return An error code: 0 - success, otherwise - failure
72 **/
73 static inline PetscErrorCode IntArrayCeedToPetsc(PetscInt num_entries, CeedInt **array_ceed, PetscInt **array_petsc) {
74   const CeedInt  int_c = 0;
75   const PetscInt int_p = 0;
76 
77   PetscFunctionBeginUser;
78   if (sizeof(int_c) == sizeof(int_p)) {
79     *array_petsc = (PetscInt *)*array_ceed;
80   } else {
81     *array_petsc = malloc(num_entries * sizeof(PetscInt));
82     for (PetscInt i = 0; i < num_entries; i++) (*array_petsc)[i] = (*array_ceed)[i];
83     free(*array_ceed);
84   }
85   *array_ceed = NULL;
86   PetscFunctionReturn(PETSC_SUCCESS);
87 }
88 
89 /**
90   @brief Translate array of `PetscInt` to `CeedInt`.
91     If the types differ, `array_petsc` is freed with `PetscFree()` and `array_ceed` is allocated with `PetscMalloc1()`.
92     Caller is responsible for freeing `array_ceed` with `PetscFree()`.
93 
94   Not collective across MPI processes.
95 
96   @param[in]      num_entries  Number of array entries
97   @param[in,out]  array_petsc  Array of `PetscInt`
98   @param[out]     array_ceed   Array of `CeedInt`
99 
100   @return An error code: 0 - success, otherwise - failure
101 **/
102 static inline PetscErrorCode IntArrayPetscToCeed(PetscInt num_entries, PetscInt **array_petsc, CeedInt **array_ceed) {
103   const CeedInt  int_c = 0;
104   const PetscInt int_p = 0;
105 
106   PetscFunctionBeginUser;
107   if (sizeof(int_c) == sizeof(int_p)) {
108     *array_ceed = (CeedInt *)*array_petsc;
109   } else {
110     PetscCall(PetscMalloc1(num_entries, array_ceed));
111     for (PetscInt i = 0; i < num_entries; i++) (*array_ceed)[i] = (*array_petsc)[i];
112     PetscCall(PetscFree(*array_petsc));
113   }
114   *array_petsc = NULL;
115   PetscFunctionReturn(PETSC_SUCCESS);
116 }
117 
118 /**
119   @brief Transfer array from PETSc `Vec` to `CeedVector`.
120 
121   Collective across MPI processes.
122 
123   @param[in]   X_petsc   PETSc `Vec`
124   @param[out]  mem_type  PETSc `MemType`
125   @param[out]  x_ceed    `CeedVector`
126 
127   @return An error code: 0 - success, otherwise - failure
128 **/
129 static inline PetscErrorCode VecPetscToCeed(Vec X_petsc, PetscMemType *mem_type, CeedVector x_ceed) {
130   PetscScalar *x;
131 
132   PetscFunctionBeginUser;
133   PetscCall(VecGetArrayAndMemType(X_petsc, &x, mem_type));
134   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorSetArray(x_ceed, MemTypePetscToCeed(*mem_type), CEED_USE_POINTER, x));
135   PetscFunctionReturn(PETSC_SUCCESS);
136 }
137 
138 /**
139   @brief Transfer array from `CeedVector` to PETSc `Vec`.
140 
141   Collective across MPI processes.
142 
143   @param[in]   x_ceed    `CeedVector`
144   @param[in]   mem_type  PETSc `MemType`
145   @param[out]  X_petsc   PETSc `Vec`
146 
147   @return An error code: 0 - success, otherwise - failure
148 **/
149 static inline PetscErrorCode VecCeedToPetsc(CeedVector x_ceed, PetscMemType mem_type, Vec X_petsc) {
150   PetscScalar *x;
151 
152   PetscFunctionBeginUser;
153   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorTakeArray(x_ceed, MemTypePetscToCeed(mem_type), &x));
154   PetscCall(VecRestoreArrayAndMemType(X_petsc, &x));
155   PetscFunctionReturn(PETSC_SUCCESS);
156 }
157 
158 /**
159   @brief Transfer read only array from PETSc `Vec` to `CeedVector`.
160 
161   Collective across MPI processes.
162 
163   @param[in]   X_petsc   PETSc `Vec`
164   @param[out]  mem_type  PETSc `MemType`
165   @param[out]  x_ceed    `CeedVector`
166 
167   @return An error code: 0 - success, otherwise - failure
168 **/
169 static inline PetscErrorCode VecReadPetscToCeed(Vec X_petsc, PetscMemType *mem_type, CeedVector x_ceed) {
170   PetscScalar *x;
171 
172   PetscFunctionBeginUser;
173   PetscCall(VecGetArrayReadAndMemType(X_petsc, (const PetscScalar **)&x, mem_type));
174   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorSetArray(x_ceed, MemTypePetscToCeed(*mem_type), CEED_USE_POINTER, x));
175   PetscFunctionReturn(PETSC_SUCCESS);
176 }
177 
178 /**
179   @brief Transfer read only array from `CeedVector` to PETSc `Vec`.
180 
181   Collective across MPI processes.
182 
183   @param[in]   x_ceed    `CeedVector`
184   @param[in]   mem_type  PETSc `MemType`
185   @param[out]  X_petsc   PETSc `Vec`
186 
187   @return An error code: 0 - success, otherwise - failure
188 **/
189 static inline PetscErrorCode VecReadCeedToPetsc(CeedVector x_ceed, PetscMemType mem_type, Vec X_petsc) {
190   PetscScalar *x;
191 
192   PetscFunctionBeginUser;
193   PetscCallCeed(CeedVectorReturnCeed(x_ceed), CeedVectorTakeArray(x_ceed, MemTypePetscToCeed(mem_type), &x));
194   PetscCall(VecRestoreArrayReadAndMemType(X_petsc, (const PetscScalar **)&x));
195   PetscFunctionReturn(PETSC_SUCCESS);
196 }
197 
198 /**
199   @brief Copy PETSc `Vec` data into `CeedVector`
200 
201   @param[in]   X_petsc PETSc `Vec`
202   @param[out]  x_ceed  `CeedVector`
203 
204   @return An error code: 0 - success, otherwise - failure
205 **/
206 static inline PetscErrorCode VecCopyPetscToCeed(Vec X_petsc, CeedVector x_ceed) {
207   PetscScalar *x;
208   PetscMemType mem_type;
209   PetscInt     X_size;
210   CeedSize     x_size;
211   Ceed         ceed;
212 
213   PetscFunctionBeginUser;
214   PetscCall(CeedVectorGetCeed(x_ceed, &ceed));
215   PetscCall(VecGetLocalSize(X_petsc, &X_size));
216   PetscCallCeed(ceed, CeedVectorGetLength(x_ceed, &x_size));
217   PetscCheck(X_size == x_size, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "X_petsc (%" PetscInt_FMT ") and x_ceed (%" CeedSize_FMT ") must be same size",
218              X_size, x_size);
219 
220   PetscCall(VecGetArrayReadAndMemType(X_petsc, (const PetscScalar **)&x, &mem_type));
221   PetscCallCeed(ceed, CeedVectorSetArray(x_ceed, MemTypePetscToCeed(mem_type), CEED_COPY_VALUES, x));
222   PetscCall(VecRestoreArrayReadAndMemType(X_petsc, (const PetscScalar **)&x));
223   PetscFunctionReturn(PETSC_SUCCESS);
224 }
225 
226 /**
227   @brief Return the quadrature size from the eval mode, dimension, and number of components
228 
229   @param[in]  eval_mode       The basis evaluation mode
230   @param[in]  dim             The basis dimension
231   @param[in]  num_components  The basis number of components
232 
233   @return The maximum of the two integers
234 
235 **/
236 /// @ingroup RatelInternal
237 static inline CeedInt GetCeedQuadratureSize(CeedEvalMode eval_mode, CeedInt dim, CeedInt num_components) {
238   switch (eval_mode) {
239     case CEED_EVAL_INTERP:
240       return num_components;
241     case CEED_EVAL_GRAD:
242       return dim * num_components;
243     default:
244       return -1;
245   }
246 }
247 
248 /**
249   @brief Convert from DMPolytopeType to CeedElemTopology
250 
251   @param[in]  cell_type  DMPolytopeType for the cell
252 
253   @return CeedElemTopology, or 0 if no equivelent CeedElemTopology was found
254 **/
255 static inline CeedElemTopology PolytopeTypePetscToCeed(DMPolytopeType cell_type) {
256   switch (cell_type) {
257     case DM_POLYTOPE_TRIANGLE:
258       return CEED_TOPOLOGY_TRIANGLE;
259     case DM_POLYTOPE_QUADRILATERAL:
260       return CEED_TOPOLOGY_QUAD;
261     case DM_POLYTOPE_TETRAHEDRON:
262       return CEED_TOPOLOGY_TET;
263     case DM_POLYTOPE_HEXAHEDRON:
264       return CEED_TOPOLOGY_HEX;
265     default:
266       return 0;
267   }
268 }
269