xref: /petsc/src/mat/impls/hypre/mhypre.c (revision bcee047adeeb73090d7e36cc71e39fc287cdbb97) !
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscpkg_version.h>
7 #include <petsc/private/petschypre.h>
8 #include <petscmathypre.h>
9 #include <petsc/private/matimpl.h>
10 #include <petsc/private/deviceimpl.h>
11 #include <../src/mat/impls/hypre/mhypre.h>
12 #include <../src/mat/impls/aij/mpi/mpiaij.h>
13 #include <../src/vec/vec/impls/hypre/vhyp.h>
14 #include <HYPRE.h>
15 #include <HYPRE_utilities.h>
16 #include <_hypre_parcsr_ls.h>
17 #include <_hypre_sstruct_ls.h>
18 
19 #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
20   #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
21 #endif
22 
23 static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
24 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
25 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
28 static PetscErrorCode hypre_array_destroy(void *);
29 static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);
30 
31 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
32 {
33   PetscInt        i, n_d, n_o;
34   const PetscInt *ia_d, *ia_o;
35   PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
36   HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;
37 
38   PetscFunctionBegin;
39   if (A_d) { /* determine number of nonzero entries in local diagonal part */
40     PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
41     if (done_d) {
42       PetscCall(PetscMalloc1(n_d, &nnz_d));
43       for (i = 0; i < n_d; i++) nnz_d[i] = ia_d[i + 1] - ia_d[i];
44     }
45     PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
46   }
47   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
48     PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
49     if (done_o) {
50       PetscCall(PetscMalloc1(n_o, &nnz_o));
51       for (i = 0; i < n_o; i++) nnz_o[i] = ia_o[i + 1] - ia_o[i];
52     }
53     PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
54   }
55   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
56     if (!done_o) { /* only diagonal part */
57       PetscCall(PetscCalloc1(n_d, &nnz_o));
58     }
59 #if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
60     { /* If we don't do this, the columns of the matrix will be all zeros! */
61       hypre_AuxParCSRMatrix *aux_matrix;
62       aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
63       hypre_AuxParCSRMatrixDestroy(aux_matrix);
64       hypre_IJMatrixTranslator(ij) = NULL;
65       PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
66       /* it seems they partially fixed it in 2.19.0 */
67   #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
68       aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
69       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
70   #endif
71     }
72 #else
73     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
74 #endif
75     PetscCall(PetscFree(nnz_d));
76     PetscCall(PetscFree(nnz_o));
77   }
78   PetscFunctionReturn(PETSC_SUCCESS);
79 }
80 
81 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
82 {
83   PetscInt rstart, rend, cstart, cend;
84 
85   PetscFunctionBegin;
86   PetscCall(PetscLayoutSetUp(A->rmap));
87   PetscCall(PetscLayoutSetUp(A->cmap));
88   rstart = A->rmap->rstart;
89   rend   = A->rmap->rend;
90   cstart = A->cmap->rstart;
91   cend   = A->cmap->rend;
92   PetscHYPREInitialize();
93   if (hA->ij) {
94     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
95     PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
96   }
97   PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
98   PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
99   {
100     PetscBool       same;
101     Mat             A_d, A_o;
102     const PetscInt *colmap;
103     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
104     if (same) {
105       PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
106       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
107       PetscFunctionReturn(PETSC_SUCCESS);
108     }
109     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
110     if (same) {
111       PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
112       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
113       PetscFunctionReturn(PETSC_SUCCESS);
114     }
115     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
116     if (same) {
117       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
118       PetscFunctionReturn(PETSC_SUCCESS);
119     }
120     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
121     if (same) {
122       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
123       PetscFunctionReturn(PETSC_SUCCESS);
124     }
125   }
126   PetscFunctionReturn(PETSC_SUCCESS);
127 }
128 
129 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
130 {
131   PetscBool flg;
132 
133   PetscFunctionBegin;
134 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
135   PetscCallExternal(HYPRE_IJMatrixInitialize, ij);
136 #else
137   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, ij, HYPRE_MEMORY_HOST);
138 #endif
139   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
140   if (flg) {
141     PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
142     PetscFunctionReturn(PETSC_SUCCESS);
143   }
144   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
145   if (flg) {
146     PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
147     PetscFunctionReturn(PETSC_SUCCESS);
148   }
149   PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
150 }
151 
152 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
153 {
154   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
155   HYPRE_Int              type;
156   hypre_ParCSRMatrix    *par_matrix;
157   hypre_AuxParCSRMatrix *aux_matrix;
158   hypre_CSRMatrix       *hdiag;
159   PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
160 
161   PetscFunctionBegin;
162   PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
163   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
164   PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
165   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
166   /*
167        this is the Hack part where we monkey directly with the hypre datastructures
168   */
169   if (sameint) {
170     PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
171     PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
172   } else {
173     PetscInt i;
174 
175     for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
176     for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
177   }
178 
179   aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
180   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
181   PetscFunctionReturn(PETSC_SUCCESS);
182 }
183 
/*
  Copies the CSR (i,j) pattern of an MPIAIJ matrix directly into the diag and
  offd blocks of the hypre ParCSR matrix underlying ij (values are not copied).
  Like the SeqAIJ version, this writes into hypre's internal arrays.
*/
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /* diag row pointers: memcpy when the integer widths match, else convert element-wise */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  /* hypre >= 2.16: diag column indices stay local */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = pjj[i];
#else
  /* older hypre: shift to global indices using the first owned column */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  /* hypre >= 2.16 keeps global off-diagonal column indices in a separate big_j array */
  PetscCallExternal(hypre_CSRMatrixBigInitialize, hoffd);
  jj = (PetscInt *)hoffd->big_j;
#else
  jj = (PetscInt *)hoffd->j;
#endif
  pjj = poffd->j;
  /* translate compressed offd indices to global columns via the MPIAIJ garray */
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];

  /* the pattern was filled directly, so hypre's auxiliary assembly matrix is not needed */
  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
239 
/*
  Converts a MATHYPRE matrix to MATIS.

  The local MATIS matrix is the row-by-row merge of hypre's diag and offd CSR
  blocks; offd columns are appended after the diag columns in the local
  numbering. Row/column local-to-global maps are built from the process row
  range and from [first diag column .. col_map_offd].
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, mhA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, mhA->ij, (void **)&hA);
  /* pull global sizes, first local row/column, and both local CSR blocks out of hypre */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz now counts diag + offd entries */
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    /* local column numbering: owned (diag) columns first, then offd columns */
    PetscCall(PetscMalloc1(dc + oc, &aux));
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    /* reuse the CSR arrays of the existing local MATIS matrix */
    PetscInt  nr;
    PetscBool done;
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArray(lA, &data));
  }
  /* merge local matrices: each row takes its diag entries followed by its offd
     entries (offd columns shifted by dc into the local numbering), then the row
     is sorted by column index */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    PetscScalar *aold = (PetscScalar *)aa;
    PetscInt    *jold = jj, nc = jd + jo;
    for (; jd < *hdi; jd++) {
      *jj++ = *hdj++;
      *aa++ = *hdd++;
    }
    for (; jo < *hoi; jo++) {
      *jj++ = *hoj++ + dc;
      *aa++ = *hod++;
    }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
  }
  /* trailing empty rows: close the row pointer array */
  for (; cum < dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ *a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
    PetscCall(MatISSetLocalMat(*B, lA));
    /* hack SeqAIJ: transfer ownership of the arrays allocated above to lA */
    a          = (Mat_SeqAIJ *)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    PetscCall(MatDestroy(&lA));
  }
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
350 
351 /* Move diagonal elements of the diagonal block to the front of their row, as needed by ParCSRMatrix. So damn hacky */
352 static PetscErrorCode EnsureHypreCSR_SeqAIJ(Mat A, hypre_CSRMatrix *hA)
353 {
354   Mat_SeqAIJ   *aij = (Mat_SeqAIJ *)A->data;
355   PetscBool     isseqaij;
356   HYPRE_Int     tmpj, *hj;
357   HYPRE_Complex tmpv, *ha;
358 
359   PetscFunctionBegin;
360   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
361   PetscCheck(isseqaij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Not for type %s\n", ((PetscObject)A)->type_name);
362   hj = hypre_CSRMatrixJ(hA);
363   ha = hypre_CSRMatrixData(hA);
364 #define swap_with_tmp(a, b, t) \
365   { \
366     t = a; \
367     a = b; \
368     b = t; \
369   }
370   for (PetscInt i = 0; i < A->rmap->n; i++) {
371     if (aij->diag[i] >= aij->i[i] && aij->diag[i] < aij->i[i + 1]) { /* Diagonal element of this row exists in a[] and j[] */
372       swap_with_tmp(ha[aij->i[i]], ha[aij->diag[i]], tmpv);
373       if (hj[aij->i[i]] != i) swap_with_tmp(hj[aij->i[i]], hj[aij->diag[i]], tmpj);
374     }
375   }
376 #undef swap_with_tmp
377   PetscFunctionReturn(PETSC_SUCCESS);
378 }
379 
380 static PetscErrorCode MatHYPRE_IJMatrixCopyValues(Mat A, HYPRE_IJMatrix ij)
381 {
382   HYPRE_Int           type;
383   hypre_ParCSRMatrix *par_matrix;
384   hypre_CSRMatrix    *hdiag, *hoffd;
385   PetscBool           ismpiaij;
386   Mat                 dA = A, oA = NULL;
387   PetscInt            nnz;
388   const PetscScalar  *pa;
389 
390   PetscFunctionBegin;
391   PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
392   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
393   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
394   if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));
395   PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
396 
397   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
398   nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
399   PetscCall(MatSeqAIJGetArrayRead(dA, &pa));
400   PetscCall(PetscArraycpy(hdiag->data, pa, nnz));
401   PetscCall(MatSeqAIJRestoreArrayRead(dA, &pa));
402   if (oA) {
403     hoffd = hypre_ParCSRMatrixOffd(par_matrix);
404     nnz   = hypre_CSRMatrixNumNonzeros(hoffd);
405     PetscCall(MatSeqAIJGetArrayRead(oA, &pa));
406     PetscCall(PetscArraycpy(hoffd->data, pa, nnz));
407     PetscCall(MatSeqAIJRestoreArrayRead(dA, &pa));
408   }
409   PetscCall(EnsureHypreCSR_SeqAIJ(dA, hdiag));
410   PetscFunctionReturn(PETSC_SUCCESS);
411 }
412 
413 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
414 {
415   MPI_Comm   comm = PetscObjectComm((PetscObject)A);
416   Mat        M    = NULL;
417   Mat        dA = A, oA = NULL;
418   PetscBool  ismpiaij, issbaij, isbaij;
419   Mat_HYPRE *hA;
420 
421   PetscFunctionBegin;
422   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPIBAIJ, ""));
423   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
424   if (isbaij || issbaij) { /* handle BAIJ and SBAIJ */
425     PetscBool ismpi;
426     MatType   newtype;
427 
428     PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
429     newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
430     if (reuse == MAT_REUSE_MATRIX) {
431       PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
432       PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
433       PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
434     } else if (reuse == MAT_INITIAL_MATRIX) {
435       PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
436       PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
437     } else {
438       PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
439       PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
440     }
441     PetscFunctionReturn(PETSC_SUCCESS);
442   }
443   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
444   if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));
445   if (reuse != MAT_REUSE_MATRIX) {
446     PetscCall(MatCreate(comm, &M));
447     PetscCall(MatSetType(M, MATHYPRE));
448     PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
449     PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
450     PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
451     PetscCall(PetscLayoutSetUp(M->rmap));
452     PetscCall(PetscLayoutSetUp(M->cmap));
453 
454     hA = (Mat_HYPRE *)M->data;
455     PetscCall(MatHYPRE_CreateFromMat(A, hA));      /* Create hmat->ij and preallocate it */
456     PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij)); /* Copy A's (a,i,j) to hmat->ij */
457     M->preallocated = PETSC_TRUE;
458     if (reuse == MAT_INITIAL_MATRIX) *B = M;
459   } else M = *B;
460 
461   hA = (Mat_HYPRE *)M->data;
462   PetscCall(MatHYPRE_IJMatrixCopyValues(A, hA->ij));
463   PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
464   PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */
465 
466   if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
467   PetscFunctionReturn(PETSC_SUCCESS);
468 }
469 
/*
  Converts a MATHYPRE matrix to AIJ (SeqAIJ or MPIAIJ depending on the size of
  the communicator).

  For MAT_INPLACE_MATRIX the hypre CSR arrays are stolen when possible (integer
  widths match and hypre owns the data); the stolen arrays are later released
  through PetscContainers composed on the result, whose destroy callback is
  hypre_array_destroy.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag, *hoffd;
  MPI_Comm            comm;
  PetscScalar        *da, *oa, *aptr;
  PetscInt           *dii, *djj, *oii, *ojj, *iptr;
  PetscInt            i, dnnz, onnz, m, n;
  HYPRE_Int           type;
  PetscMPIInt         size;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  /* downs/oowns: whether hypre owns the data of the diag/offd blocks ("d owns"/"o owns");
     only updated in the MAT_INPLACE_MATRIX path */
  PetscBool           downs = PETSC_TRUE, oowns = PETSC_TRUE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij, isseqaij;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ are supported");
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCheck(HYPRE_MEMORY_DEVICE != hypre_IJMatrixMemoryLocation(hA->ij), comm, PETSC_ERR_SUP, "Not yet implemented");
#endif
  PetscCallMPI(MPI_Comm_size(comm, &size));

  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain dii/djj/da for the diagonal block: fresh allocation, arrays of the
     reused matrix, or (in place) hypre's own arrays when they can be stolen */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(PetscMalloc1(m + 1, &dii));
    PetscCall(PetscMalloc1(dnnz, &djj));
    PetscCall(PetscMalloc1(dnnz, &da));
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ *)((*B)->data);

      PetscCall(MatGetRowIJ(b->A, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&dii, (const PetscInt **)&djj, &done));
      PetscCheck(nr == m, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of local rows in diag part! %" PetscInt_FMT " != %" PetscInt_FMT, nr, m);
      PetscCheck(dii[nr] >= dnnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in diag part! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT, dii[nr], dnnz);
      PetscCall(MatSeqAIJGetArray(b->A, &da));
    } else {
      PetscCall(MatGetRowIJ(*B, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&dii, (const PetscInt **)&djj, &done));
      PetscCheck(nr == m, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of local rows! %" PetscInt_FMT " != %" PetscInt_FMT, nr, m);
      PetscCheck(dii[nr] >= dnnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT, dii[nr], dnnz);
      PetscCall(MatSeqAIJGetArray(*B, &da));
    }
  } else { /* MAT_INPLACE_MATRIX */
    downs = (PetscBool)(hypre_CSRMatrixOwnsData(hdiag));
    /* steal hypre's arrays only when integer widths match and hypre owns them */
    if (!sameint || !downs) {
      PetscCall(PetscMalloc1(m + 1, &dii));
      PetscCall(PetscMalloc1(dnnz, &djj));
    } else {
      dii = (PetscInt *)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt *)hypre_CSRMatrixJ(hdiag);
    }
    if (!downs) {
      PetscCall(PetscMalloc1(dnnz, &da));
    } else da = (PetscScalar *)hypre_CSRMatrixData(hdiag);
  }

  /* fill dii/djj from hypre's diag block (element-wise conversion if the
     integer widths differ; reuse keeps the existing row pointers) */
  if (!sameint) {
    if (reuse != MAT_REUSE_MATRIX) {
      for (i = 0; i < m + 1; i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    }
    for (i = 0; i < dnnz; i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    if (reuse != MAT_REUSE_MATRIX) PetscCall(PetscArraycpy(dii, hypre_CSRMatrixI(hdiag), m + 1));
    PetscCall(PetscArraycpy(djj, hypre_CSRMatrixJ(hdiag), dnnz));
  }
  PetscCall(PetscArraycpy(da, hypre_CSRMatrixData(hdiag), dnnz));
  /* sort each row of the diagonal block by column index */
  iptr = djj;
  aptr = da;
  for (i = 0; i < m; i++) {
    PetscInt nc = dii[i + 1] - dii[i];
    PetscCall(PetscSortIntWithScalarArray(nc, iptr, aptr));
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) { /* parallel: also extract the off-diagonal block */
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(PetscMalloc1(m + 1, &oii));
      PetscCall(PetscMalloc1(onnz, &ojj));
      PetscCall(PetscMalloc1(onnz, &oa));
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ *)((*B)->data);
      PetscInt    nr, hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool   done;

      PetscCall(MatGetRowIJ(b->B, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&oii, (const PetscInt **)&ojj, &done));
      PetscCheck(nr == hr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of local rows in offdiag part! %" PetscInt_FMT " != %" PetscInt_FMT, nr, hr);
      PetscCheck(oii[nr] >= onnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse matrix: different number of nonzeros in off-diagonal part of matrix! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT, oii[nr], onnz);
      PetscCall(MatSeqAIJGetArray(b->B, &oa));
    } else { /* MAT_INPLACE_MATRIX */
      oowns = (PetscBool)(hypre_CSRMatrixOwnsData(hoffd));
      if (!sameint || !oowns) {
        PetscCall(PetscMalloc1(m + 1, &oii));
        PetscCall(PetscMalloc1(onnz, &ojj));
      } else {
        oii = (PetscInt *)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt *)hypre_CSRMatrixJ(hoffd);
      }
      if (!oowns) {
        PetscCall(PetscMalloc1(onnz, &oa));
      } else oa = (PetscScalar *)hypre_CSRMatrixData(hoffd);
    }
    if (reuse != MAT_REUSE_MATRIX) {
      if (!sameint) {
        for (i = 0; i < m + 1; i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
      } else {
        PetscCall(PetscArraycpy(oii, hypre_CSRMatrixI(hoffd), m + 1));
      }
    }
    PetscCall(PetscArraycpy(oa, hypre_CSRMatrixData(hoffd), onnz));

    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    /* we only need the permutation to be computed properly, I don't know if HYPRE
       messes up with the ordering. Just in case, allocate some memory and free it
       later */
    if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ *)((*B)->data);
      PetscInt    mnz;

      /* scratch buffer sized by the widest row; refilled per row below */
      PetscCall(MatSeqAIJGetMaxRowNonzeros(b->B, &mnz));
      PetscCall(PetscMalloc1(mnz, &ojj));
    } else
      /* map hypre's compressed offd indices to global columns via col_map_offd */
      for (i = 0; i < onnz; i++) ojj[i] = coffd[offdj[i]];
    iptr = ojj;
    aptr = oa;
    for (i = 0; i < m; i++) {
      PetscInt nc = oii[i + 1] - oii[i];
      if (reuse == MAT_REUSE_MATRIX) {
        PetscInt j;

        iptr = ojj;
        for (j = 0; j < nc; j++) iptr[j] = coffd[offdj[oii[i] + j]];
      }
      PetscCall(PetscSortIntWithScalarArray(nc, iptr, aptr));
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_REUSE_MATRIX) PetscCall(PetscFree(ojj));
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d, *o;

      PetscCall(MatCreateMPIAIJWithSplitArrays(comm, m, n, PETSC_DECIDE, PETSC_DECIDE, dii, djj, da, oii, ojj, oa, B));
      /* hack MPIAIJ: transfer ownership of the freshly allocated arrays */
      b          = (Mat_MPIAIJ *)((*B)->data);
      d          = (Mat_SeqAIJ *)b->A->data;
      o          = (Mat_SeqAIJ *)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      PetscCall(MatCreateMPIAIJWithSplitArrays(comm, m, n, PETSC_DECIDE, PETSC_DECIDE, dii, djj, da, oii, ojj, oa, &T));
      if (sameint && downs) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* Hack MPIAIJ -> free ij but maybe not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ *)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ *)(b->A->data);

        d->free_ij = PETSC_TRUE;
        d->free_a  = downs ? PETSC_FALSE : PETSC_TRUE;
      }
      if (sameint && oowns) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but maybe not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ *)(T->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ *)(b->B->data);

        o->free_ij = PETSC_TRUE;
        o->free_a  = oowns ? PETSC_FALSE : PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      PetscCall(MatHeaderReplace(A, &T));
    }
  } else { /* sequential: no off-diagonal block */
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ *b;

      PetscCall(MatCreateSeqAIJWithArrays(comm, m, n, dii, djj, da, B));
      /* hack SeqAIJ: transfer ownership of the freshly allocated arrays */
      b          = (Mat_SeqAIJ *)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      PetscCall(MatCreateSeqAIJWithArrays(comm, m, n, dii, djj, da, &T));
      if (sameint && downs) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but maybe not a */
        Mat_SeqAIJ *b = (Mat_SeqAIJ *)(T->data);

        b->free_ij = PETSC_TRUE;
        b->free_a  = downs ? PETSC_FALSE : PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      PetscCall(MatHeaderReplace(A, &T));
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns */
  if (reuse == MAT_INPLACE_MATRIX) {
    const char *names[6] = {"_hypre_csr_da", "_hypre_csr_oa", "_hypre_csr_dii", "_hypre_csr_djj", "_hypre_csr_oii", "_hypre_csr_ojj"};
    /* only the arrays actually stolen from hypre get a container; the rest are NULL */
    // clang-format off
    void *ptrs[6] = {downs ? da : NULL,
                     oowns ? oa : NULL,
                     downs && sameint ? dii : NULL,
                     downs && sameint ? djj : NULL,
                     oowns && sameint ? oii : NULL,
                     oowns && sameint ? ojj : NULL};
    // clang-format on
    for (i = 0; i < 6; i++) {
      PetscContainer c;

      PetscCall(PetscContainerCreate(comm, &c));
      PetscCall(PetscContainerSetPointer(c, ptrs[i]));
      PetscCall(PetscContainerSetUserDestroy(c, hypre_array_destroy));
      PetscCall(PetscObjectCompose((PetscObject)(*B), names[i], (PetscObject)c));
      PetscCall(PetscContainerDestroy(&c));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
720 
/*
  Wraps an AIJ matrix A (MATSEQAIJ or MATMPIAIJ, possibly the CUSPARSE flavors)
  in a temporary hypre_ParCSRMatrix WITHOUT copying the numerical values: the
  CSR arrays of A are aliased into the hypre object whenever possible. Index
  arrays are deep-copied only when sizeof(PetscInt) != sizeof(HYPRE_Int).
  The returned *hA must be released with MatAIJRestoreParCSR_Private(), which
  undoes the aliasing before destroying the hypre object.
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag, *hoffd;
  Mat_SeqAIJ         *diag, *offd;
  PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ismpiaij, isseqaij;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
  PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
  PetscHYPREInitialize();
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ *)(A->data);

    diag = (Mat_SeqAIJ *)a->A->data;
    offd = (Mat_SeqAIJ *)a->B->data;
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE) && defined(HYPRE_USING_CUDA)
    PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIAIJCUSPARSE, &iscuda));
    if (iscuda && !A->boundtocpu) {
      /* device path: cuSPARSE hands back HYPRE_Int-compatible indices, so no
         host-side conversion is needed regardless of sizeof(PetscInt) */
      sameint = PETSC_TRUE;
      PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
    } else {
#else
    {
#endif
      pdi = diag->i;
      pdj = diag->j;
      poi = offd->i;
      poj = offd->j;
      if (sameint) { /* same integer width: alias directly, no copy */
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
        hoi = (HYPRE_Int *)poi;
        hoj = (HYPRE_Int *)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else { /* sequential: no off-diagonal block */
    diag = (Mat_SeqAIJ *)A->data;
    offd = NULL;
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE)
    PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJCUSPARSE, &iscuda));
    if (iscuda && !A->boundtocpu) {
      sameint = PETSC_TRUE;
      PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
    } else {
#else
    {
#endif
      pdi = diag->i;
      pdj = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* assumed partition: hypre wants only this rank's [start,end) pair */
    PetscCallMPI(MPI_Comm_rank(comm, &myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, noffd, dnnz, onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* the starts arrays above belong to PETSc's layouts; hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
  hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
    for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)(pdi[i]);
    for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)(pdj[i]);
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a; /* values aliased, never copied */
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag, 0); /* PETSc keeps ownership of the values */

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
      for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)(poi[i]);
      for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)(poj[i]);
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd, 0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, iscuda ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
#else
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  PetscCallExternal(hypre_ParCSRMatrixInitialize, tA);
  #else
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, HYPRE_MEMORY_HOST);
  #endif
#endif
  /* drop hypre's own colmap and alias PETSc's garray instead
     (restored to NULL in MatAIJRestoreParCSR_Private before destruction) */
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallExternal(hypre_MatvecCommPkgCreate, tA);
  *hA = tA;
  PetscFunctionReturn(PETSC_SUCCESS);
}
858 
/*
  Companion to MatAIJGetParCSR_Private(): dismantles the temporary
  hypre_ParCSRMatrix *hA. Index arrays that were allocated by PETSc (when
  sizeof(PetscInt) != sizeof(HYPRE_Int)) are freed here; all pointers aliased
  from the PETSc matrix A are set to NULL BEFORE hypre_ParCSRMatrixDestroy()
  runs, so hypre does not free memory still owned by A.
*/
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* on the CUSPARSE path Get aliased device indices directly, so nothing was malloc'ed */
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  if (iscuda) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) { /* the offd block only exists in the parallel case */
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL; /* values belong to the PETSc matrix */
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL; /* was aliased to a->garray */
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
902 
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first

   Computes *hRAP = R * A * P with hypre; used for both PtAP (hR == hP) and
   general triple products. Ownership flags of hP/hR are saved and restored
   around the call because the hypre routine steals them (see below). */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* remember ownership so it can be restored after the product */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallExternal(hypre_BoomerAMGBuildCoarseOperator, hR, hA, hP, hRAP);
  PetscCallExternal(hypre_ParCSRMatrixSetNumNonzeros, *hRAP);
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
935 
/*
  Numeric phase of C = P^T * A * P for AIJ operands, computed through hypre's
  RAP kernel. A and P are wrapped as temporary ParCSR matrices (no value copy),
  the product is converted back to AIJ, and its header is merged into C.
*/
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(P, &hP));
  PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP)); /* R == P gives PtAP */
  PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));

  PetscCall(MatHeaderMerge(C, &B));
  C->product = product; /* MatHeaderMerge clears it; restore for the product framework */

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
  PetscFunctionReturn(PETSC_SUCCESS);
}
955 
/*
  Symbolic phase of C = P^T * A * P via hypre: no symbolic work is needed since
  hypre computes the structure during the numeric phase; just set the type and
  the numeric callbacks. The fill argument is unused.
*/
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}
964 
/*
  Numeric phase of C = P^T * A * P where A is AIJ and P is MATHYPRE.
  A is temporarily wrapped as ParCSR, P's existing ParCSR is used directly,
  and the result takes C's current type (AIJ or HYPRE, chosen at symbolic time).
*/
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
  HYPRE_Int           type;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);

  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));

  /* create temporary matrix and merge to C */
  PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
991 
/*
  Numeric phase of C = P^T * A * P where both A and P are MATHYPRE.
  The underlying ParCSR objects are used directly; the result is a new
  MATHYPRE matrix merged into C.
*/
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
  Mat_HYPRE          *hA, *hP;
  PetscBool           ishypre;
  HYPRE_Int           type;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hP = (Mat_HYPRE *)P->data;
  /* only the ParCSR storage format of hypre IJ matrices is supported */
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
  PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1018 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first

   Computes *hAB = hA * hB; the device build uses hypre_ParCSRMatMat, the
   host-only build uses the older hypre_ParMatmul entry point. */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1037 
/*
  Numeric phase of C = A * B for AIJ operands via hypre: both operands are
  wrapped as temporary ParCSR matrices, multiplied with hypre, and the AIJ
  result is merged into C.
*/
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
  PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));

  PetscCall(MatHeaderMerge(C, &D));
  C->product = product; /* MatHeaderMerge clears it; restore for the product framework */

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1057 
/*
  Symbolic phase of C = A * B via hypre: structure is computed by hypre during
  the numeric phase, so only the output type and callbacks are set here.
  The fill argument is unused.
*/
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1066 
/*
  Numeric phase of C = A * B where both operands are MATHYPRE. The product is
  computed by hypre and the resulting MATHYPRE matrix replaces C's header
  (rather than merging it; see comment below about the communicator).
*/
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
  Mat_HYPRE          *hA, *hB;
  PetscBool           ishypre;
  HYPRE_Int           type;
  Mat_Product        *product;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hB = (Mat_HYPRE *)B->data;
  /* only the ParCSR storage format of hypre IJ matrices is supported */
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hB->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hB->ij, (void **)&Bparcsr);
  PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
  PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  PetscCall(MatHeaderReplace(C, &D));
  C->product             = product;
  /* MatHeaderReplace wiped C's ops table; reinstall the product callbacks */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1101 
/*
  Numeric phase of D = A^T * B * C for AIJ operands, computed with hypre's RAP
  kernel (which evaluates R^T * A * P with R = A here). All three operands are
  wrapped as temporary ParCSR matrices; the AIJ result is merged into D.
*/
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
{
  Mat                 E;
  hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatAIJGetParCSR_Private(C, &hC));
  PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
  PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
  PetscCall(MatHeaderMerge(D, &E));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1119 
/*
  Symbolic phase of D = A^T * B * C via hypre: hypre computes the structure in
  the numeric phase, so only the output type is set. The fill argument is unused.
*/
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1126 
/* Symbolic phase for C = A*B with MATHYPRE operands: just wire up the generic
   numeric driver; all real work happens at numeric time. */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1133 
1134 static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1135 {
1136   Mat_Product *product = C->product;
1137   PetscBool    Ahypre;
1138 
1139   PetscFunctionBegin;
1140   PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
1141   if (Ahypre) { /* A is a Hypre matrix */
1142     PetscCall(MatSetType(C, MATHYPRE));
1143     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1144     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1145     PetscFunctionReturn(PETSC_SUCCESS);
1146   }
1147   PetscFunctionReturn(PETSC_SUCCESS);
1148 }
1149 
/* Symbolic phase for C = P^T*A*P with MATHYPRE operands: just wire up the
   generic numeric driver; all real work happens at numeric time. */
static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1156 
/*
  Configure the product C = P^T*A*P when a MATHYPRE operand is involved.
  If A is MATHYPRE, C is forced to MATHYPRE. If A is AIJ (and P is MATHYPRE),
  the user may choose the output type of C through a runtime option
  (-matptap_hypre_outtype or -mat_product_algorithm_hypre_outtype),
  defaulting to "aij".
*/
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    flg;
  PetscInt     type        = 0; /* index into outTypes[]; default "aij" */
  const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
  PetscInt     ntype       = 4;
  Mat          A           = product->A;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) { /* user called MatPtAP() directly: old-style option name */
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  } else { /* MatProduct API: new-style option name */
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  }

  if (type == 0 || type == 1 || type == 2) { /* "aij", "seqaij", "mpiaij" */
    PetscCall(MatSetType(C, MATAIJ));
  } else if (type == 3) { /* "hypre" */
    PetscCall(MatSetType(C, MATHYPRE));
  } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1197 
1198 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1199 {
1200   Mat_Product *product = C->product;
1201 
1202   PetscFunctionBegin;
1203   switch (product->type) {
1204   case MATPRODUCT_AB:
1205     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1206     break;
1207   case MATPRODUCT_PtAP:
1208     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1209     break;
1210   default:
1211     break;
1212   }
1213   PetscFunctionReturn(PETSC_SUCCESS);
1214 }
1215 
/* y = A^T * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1222 
/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1229 
/* z = A * x + y; implemented as z = y followed by z += A * x */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1237 
/* z = A^T * x + y; implemented as z = y followed by z += A^T * x */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1245 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans

   The matrix's work IJVectors hA->x and hA->b are temporarily pointed at the
   PETSc vectors (no copy): x is pushed read-only, y is pushed read-write when
   b != 0 (its old contents contribute to the result) and write-only otherwise.
   For the transpose case the roles of the two work vectors are swapped so that
   each keeps a layout compatible with the operation. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx, *hy;

  PetscFunctionBegin;
  if (trans) {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hx);
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hy);
  } else {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hx);
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hy);
  }
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
  if (trans) {
    PetscCallExternal(hypre_ParCSRMatrixMatvecT, a, parcsr, hx, b, hy);
  } else {
    PetscCallExternal(hypre_ParCSRMatrixMatvec, a, parcsr, hx, b, hy);
  }
  /* both work vectors were pushed above, so pop both regardless of trans */
  PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1277 
/*
  Destructor for MATHYPRE: releases the work IJVectors, the hypre IJ matrix,
  the duplicated communicator, the assembly stash, the scratch array, the COO
  support data, and all composed methods.
*/
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
  if (hA->ij) {
    /* if the ParCSR object is owned by someone else, detach it so
       HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
  }
  if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));

  PetscCall(MatStashDestroy_Private(&A->stash));
  PetscCall(PetscFree(hA->array)); /* scratch space managed by MatGetArray_HYPRE */

  if (hA->cooMat) { /* COO assembly support was set up */
    PetscCall(MatDestroy(&hA->cooMat));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hA->diagJ, hA->memType));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hA->offdJ, hA->memType));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hA->diag, hA->memType));
  }

  /* remove all composed methods installed at creation */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1312 
/* Default setup: preallocate with default nonzero estimates per row */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1319 
//TODO FIX hypre_CSRMatrixMatvecOutOfPlace
#if defined(PETSC_HAVE_HYPRE_DEVICE)
/*
  Bind/unbind the matrix to the CPU: migrates the assembled hypre ParCSR data
  between host and device memory (only if its current location differs from the
  requested one) and propagates the binding to the work vectors.
*/
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;

  PetscFunctionBegin;
  A->boundtocpu = bind;
  /* only migrate once the matrix has been assembled and is in the wrong place */
  if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
    PetscCallExternal(hypre_ParCSRMatrixMigrate, parcsr, hmem);
  }
  if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
  if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
1339 
/*
  Final assembly for MATHYPRE: drains the off-process stash into the IJ matrix,
  calls HYPRE_IJMatrixAssemble, recreates hypre's auxiliary matrix (destroyed
  by assembly) so further MatSetValues calls keep working, and creates the work
  vectors and the communication package. MAT_FLUSH_ASSEMBLY is not supported.
*/
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
  PetscMPIInt  n;
  PetscInt     i, j, rstart, ncols, flg;
  PetscInt    *row, *col;
  PetscScalar *val;

  PetscFunctionBegin;
  PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* receive and insert values stashed by other processes */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
      if (!flg) break; /* no more messages */

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));

        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&A->stash));
  }

  PetscCallExternal(HYPRE_IJMatrixAssemble, hA->ij);
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!A->sortedfull) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    PetscCallExternal(hypre_AuxParCSRMatrixDestroy, aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      PetscCallExternal(hypre_AuxParCSRMatrixInitialize, aux_matrix);
#else
      PetscCallExternal(hypre_AuxParCSRMatrixInitialize_v2, aux_matrix, HYPRE_MEMORY_HOST);
#endif
    }
  }
  { /* make sure the communication package exists for subsequent matvecs */
    hypre_ParCSRMatrix *parcsr;

    PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallExternal(hypre_MatvecCommPkgCreate, parcsr);
  }
  /* lazily create the work vectors used by the matvec kernels */
  if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
  if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1408 
1409 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1410 {
1411   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1412 
1413   PetscFunctionBegin;
1414   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");
1415 
1416   if (hA->array_size >= size) {
1417     *array = hA->array;
1418   } else {
1419     PetscCall(PetscFree(hA->array));
1420     hA->array_size = size;
1421     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1422     *array = hA->array;
1423   }
1424 
1425   hA->array_available = PETSC_FALSE;
1426   PetscFunctionReturn(PETSC_SUCCESS);
1427 }
1428 
1429 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1430 {
1431   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1432 
1433   PetscFunctionBegin;
1434   *array              = NULL;
1435   hA->array_available = PETSC_TRUE;
1436   PetscFunctionReturn(PETSC_SUCCESS);
1437 }
1438 
/*
   MatSetValues implementation for MATHYPRE.

   Scratch layout (one buffer from MatGetArray_HYPRE, sized below):
     cscr[0] : nc PetscInt       - compacted (nonnegative) column indices
     cscr[1] : nc PetscInt       - original positions of those columns in the input row
     sscr    : nc*nr HYPRE_Complex - row values converted to hypre's scalar type

   Negative row/column indices are ignored, following the usual PETSc
   MatSetValues() convention. Off-process rows are stashed for later
   assembly (INSERT_VALUES) or handed to hypre directly (ADD_VALUES).
*/
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
  PetscScalar   *vals = (PetscScalar *)v;
  HYPRE_Complex *sscr;
  PetscInt      *cscr[2];
  PetscInt       i, nzc;
  PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
  void          *array = NULL;

  PetscFunctionBegin;
  PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
  cscr[0] = (PetscInt *)array;
  cscr[1] = ((PetscInt *)array) + nc;
  sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
  /* compact out negative column indices, remembering each kept column's
     position so the matching value can be fetched from the input row */
  for (i = 0, nzc = 0; i < nc; i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc]   = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    PetscCall(MatRestoreArray_HYPRE(A, &array));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscCallExternal(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
    PetscCallExternal(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
  }
#endif

  if (ins == ADD_VALUES) {
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          if (hA->donotstash) continue;
        }
        /* guard against HYPRE_Int being narrower than PetscInt */
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        /* off-process adds are handled by hypre's own communication at assembly */
        PetscCallExternal(HYPRE_IJMatrixAddToValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          /* stash for communication during MatAssemblyBegin/End;
             NOTE(review): passes HYPRE_Complex data as PetscScalar — assumes the two types coincide here */
          if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
        }
        /* local values */
        else
          PetscCallExternal(HYPRE_IJMatrixSetValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
      }
      vals += nc;
    }
  }

  PetscCall(MatRestoreArray_HYPRE(A, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1517 
/*
   Type-specific preallocation for MATHYPRE: creates the HYPRE_IJMatrix if
   needed, sets per-row diagonal/off-diagonal nonzero counts, and
   (re)initializes the IJ matrix so values may be inserted afterwards.
*/
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
  HYPRE_Int  *hdnnz, *honnz;
  PetscInt    i, rs, re, cs, ce, bs;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rs = A->rmap->rstart;
  re = A->rmap->rend;
  cs = A->cmap->rstart;
  ce = A->cmap->rend;
  if (!hA->ij) {
    /* hypre uses inclusive upper bounds, hence the -1 */
    PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rs, re - 1, cs, ce - 1, &hA->ij);
    PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
  } else {
    /* the IJ matrix already exists: verify its layout matches PETSc's */
    HYPRE_BigInt hrs, hre, hcs, hce;
    PetscCallExternal(HYPRE_IJMatrixGetLocalRange, hA->ij, &hrs, &hre, &hcs, &hce);
    PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
    PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
  }
  PetscCall(MatGetBlockSize(A, &bs));
  /* default: 10 nonzeros per block-row when the user gives no hint */
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;

  if (!dnnz) {
    PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
    for (i = 0; i < A->rmap->n; i++) hdnnz[i] = dnz;
  } else {
    /* NOTE(review): reinterprets PetscInt[] as HYPRE_Int[]; valid only when the two integer types match — the standard PETSc/hypre configuration */
    hdnnz = (HYPRE_Int *)dnnz;
  }
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      PetscCall(PetscMalloc1(A->rmap->n, &honnz));
      for (i = 0; i < A->rmap->n; i++) honnz[i] = onz;
    } else honnz = (HYPRE_Int *)onnz;
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, hA->ij, hdnnz, honnz);
    aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
  } else {
    /* sequential: no off-diagonal block exists */
    honnz = NULL;
    PetscCallExternal(HYPRE_IJMatrixSetRowSizes, hA->ij, hdnnz);
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
#else
  PetscCallExternal(HYPRE_IJMatrixInitialize_v2, hA->ij, HYPRE_MEMORY_HOST);
#endif
  /* free only the count arrays we allocated ourselves */
  if (!dnnz) PetscCall(PetscFree(hdnnz));
  if (!onnz && honnz) PetscCall(PetscFree(honnz));
  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1590 
1591 /*@C
1592    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1593 
1594    Collective
1595 
1596    Input Parameters:
1597 +  A - the matrix
1598 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1599           (same value is used for all local rows)
1600 .  dnnz - array containing the number of nonzeros in the various rows of the
1601           DIAGONAL portion of the local submatrix (possibly different for each row)
          or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
1603           The size of this array is equal to the number of local rows, i.e `m`.
1604           For matrices that will be factored, you must leave room for (and set)
1605           the diagonal entry even if it is zero.
1606 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1607           submatrix (same value is used for all local rows).
1608 -  onnz - array containing the number of nonzeros in the various rows of the
1609           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
1611           structure. The size of this array is equal to the number
1612           of local rows, i.e `m`.
1613 
1614    Level: intermediate
1615 
1616    Note:
1617     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, `onz` and `onnz` are ignored.
1618 
1619 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
1620 @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  /* dispatch to the type-specific implementation; silently a no-op for
     matrix types that do not register MatHYPRESetPreallocation_C */
  PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1629 
1630 /*@C
1631    MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`
1632 
1633    Collective
1634 
1635    Input Parameters:
1636 +  parcsr   - the pointer to the `hypre_ParCSRMatrix`
1637 .  mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
1638 -  copymode - PETSc copying options, see  `PetscCopyMode`
1639 
1640    Output Parameter:
1641 .  A  - the matrix
1642 
1643    Level: intermediate
1644 
1645 .seealso: [](ch_matrices), `Mat`, `MatHYPRE`, `PetscCopyMode`
1646 @*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
{
  Mat        T;
  Mat_HYPRE *hA;
  MPI_Comm   comm;
  PetscInt   rstart, rend, cstart, cend, M, N;
  PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  /* classify the requested output type */
  PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
  PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
  PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
  PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
  PetscCall(PetscStrcmp(mtype, MATIS, &isis));
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: ranges are half-open [start, end) */
  rend++;
  cend++;
  rend = PetscMin(rend, M);
  cend = PetscMin(cend, N);

  /* create PETSc matrix with MatHYPRE */
  PetscCall(MatCreate(comm, &T));
  PetscCall(MatSetSizes(T, rend - rstart, cend - cstart, M, N));
  PetscCall(MatSetType(T, MATHYPRE));
  hA = (Mat_HYPRE *)(T->data);

  /* create HYPRE_IJMatrix (hypre uses inclusive upper bounds, hence -1) */
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);

  // TODO DEV
  /* create new ParCSR object if needed: a MATHYPRE result with
     PETSC_COPY_VALUES clones the input and then owns the clone */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
    hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;

    /* old hypre: CompleteClone copies only the pattern; copy the values by hand */
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
#endif
    parcsr   = new_parcsr;
    copymode = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated              = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscCallExternal(HYPRE_IJMatrixInitialize,hA->ij);
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    /* point hypre's non-owned start arrays into PETSc's layout ranges;
       the layouts outlive the hypre matrix so the aliasing is safe */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, NULL, &map));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, &map, NULL));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A = T;
    PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
      PetscCall(MatDestroy(&T));
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
      *A = T;
    }
  } else if (isis) {
    PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    PetscCall(MatDestroy(&T));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1768 
1769 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1770 {
1771   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1772   HYPRE_Int  type;
1773 
1774   PetscFunctionBegin;
1775   PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
1776   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1777   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1778   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)parcsr);
1779   PetscFunctionReturn(PETSC_SUCCESS);
1780 }
1781 
1782 /*@C
1783    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1784 
1785    Not Collective
1786 
1787    Input Parameter:
1788 .  A  - the `MATHYPRE` object
1789 
1790    Output Parameter:
1791 .  parcsr  - the pointer to the `hypre_ParCSRMatrix`
1792 
1793    Level: intermediate
1794 
1795 .seealso: [](ch_matrices), `Mat`, `MatHYPRE`, `PetscCopyMode`
1796 @*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  /* errors if the matrix type does not register MatHYPREGetParCSR_C */
  PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1805 
1806 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1807 {
1808   hypre_ParCSRMatrix *parcsr;
1809   hypre_CSRMatrix    *ha;
1810   PetscInt            rst;
1811 
1812   PetscFunctionBegin;
1813   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented with non-square diagonal blocks");
1814   PetscCall(MatGetOwnershipRange(A, &rst, NULL));
1815   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1816   if (missing) *missing = PETSC_FALSE;
1817   if (dd) *dd = -1;
1818   ha = hypre_ParCSRMatrixDiag(parcsr);
1819   if (ha) {
1820     PetscInt   size, i;
1821     HYPRE_Int *ii, *jj;
1822 
1823     size = hypre_CSRMatrixNumRows(ha);
1824     ii   = hypre_CSRMatrixI(ha);
1825     jj   = hypre_CSRMatrixJ(ha);
1826     for (i = 0; i < size; i++) {
1827       PetscInt  j;
1828       PetscBool found = PETSC_FALSE;
1829 
1830       for (j = ii[i]; j < ii[i + 1] && !found; j++) found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1831 
1832       if (!found) {
1833         PetscCall(PetscInfo(A, "Matrix is missing local diagonal entry %" PetscInt_FMT "\n", i));
1834         if (missing) *missing = PETSC_TRUE;
1835         if (dd) *dd = i + rst;
1836         PetscFunctionReturn(PETSC_SUCCESS);
1837       }
1838     }
1839     if (!size) {
1840       PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1841       if (missing) *missing = PETSC_TRUE;
1842       if (dd) *dd = rst;
1843     }
1844   } else {
1845     PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1846     if (missing) *missing = PETSC_TRUE;
1847     if (dd) *dd = rst;
1848   }
1849   PetscFunctionReturn(PETSC_SUCCESS);
1850 }
1851 
/*
   Scales all stored values of a MATHYPRE matrix by s. Recent hypre
   provides hypre_ParCSRMatrixScale(); older versions require scaling the
   diagonal and off-diagonal CSR blocks by hand.
*/
static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  hypre_CSRMatrix *ha;
#endif
  HYPRE_Complex hs;

  PetscFunctionBegin;
  PetscCall(PetscHYPREScalarCast(s, &hs));
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
#if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
  PetscCallExternal(hypre_ParCSRMatrixScale, parcsr, hs);
#else /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt       size, i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    /* ii[size] is the total number of stored entries in this block */
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt       size, i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1892 
1893 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1894 {
1895   hypre_ParCSRMatrix *parcsr;
1896   HYPRE_Int          *lrows;
1897   PetscInt            rst, ren, i;
1898 
1899   PetscFunctionBegin;
1900   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1901   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1902   PetscCall(PetscMalloc1(numRows, &lrows));
1903   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1904   for (i = 0; i < numRows; i++) {
1905     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1906     lrows[i] = rows[i] - rst;
1907   }
1908   PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows);
1909   PetscCall(PetscFree(lrows));
1910   PetscFunctionReturn(PETSC_SUCCESS);
1911 }
1912 
1913 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1914 {
1915   PetscFunctionBegin;
1916   if (ha) {
1917     HYPRE_Int     *ii, size;
1918     HYPRE_Complex *a;
1919 
1920     size = hypre_CSRMatrixNumRows(ha);
1921     a    = hypre_CSRMatrixData(ha);
1922     ii   = hypre_CSRMatrixI(ha);
1923 
1924     if (a) PetscCall(PetscArrayzero(a, ii[size]));
1925   }
1926   PetscFunctionReturn(PETSC_SUCCESS);
1927 }
1928 
1929 PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1930 {
1931   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1932 
1933   PetscFunctionBegin;
1934   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
1935     PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0);
1936   } else {
1937     hypre_ParCSRMatrix *parcsr;
1938 
1939     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1940     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
1941     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
1942   }
1943   PetscFunctionReturn(PETSC_SUCCESS);
1944 }
1945 
1946 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
1947 {
1948   PetscInt       ii;
1949   HYPRE_Int     *i, *j;
1950   HYPRE_Complex *a;
1951 
1952   PetscFunctionBegin;
1953   if (!hA) PetscFunctionReturn(PETSC_SUCCESS);
1954 
1955   i = hypre_CSRMatrixI(hA);
1956   j = hypre_CSRMatrixJ(hA);
1957   a = hypre_CSRMatrixData(hA);
1958 
1959   for (ii = 0; ii < N; ii++) {
1960     HYPRE_Int jj, ibeg, iend, irow;
1961 
1962     irow = rows[ii];
1963     ibeg = i[irow];
1964     iend = i[irow + 1];
1965     for (jj = ibeg; jj < iend; jj++)
1966       if (j[jj] == irow) a[jj] = diag;
1967       else a[jj] = 0.0;
1968   }
1969   PetscFunctionReturn(PETSC_SUCCESS);
1970 }
1971 
1972 static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1973 {
1974   hypre_ParCSRMatrix *parcsr;
1975   PetscInt           *lrows, len;
1976   HYPRE_Complex       hdiag;
1977 
1978   PetscFunctionBegin;
1979   PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
1980   PetscCall(PetscHYPREScalarCast(diag, &hdiag));
1981   /* retrieve the internal matrix */
1982   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1983   /* get locally owned rows */
1984   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));
1985   /* zero diagonal part */
1986   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows, hdiag));
1987   /* zero off-diagonal part */
1988   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows, 0.0));
1989 
1990   PetscCall(PetscFree(lrows));
1991   PetscFunctionReturn(PETSC_SUCCESS);
1992 }
1993 
1994 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
1995 {
1996   PetscFunctionBegin;
1997   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
1998 
1999   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2000   PetscFunctionReturn(PETSC_SUCCESS);
2001 }
2002 
2003 static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2004 {
2005   hypre_ParCSRMatrix *parcsr;
2006   HYPRE_Int           hnz;
2007 
2008   PetscFunctionBegin;
2009   /* retrieve the internal matrix */
2010   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2011   /* call HYPRE API */
2012   PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2013   if (nz) *nz = (PetscInt)hnz;
2014   PetscFunctionReturn(PETSC_SUCCESS);
2015 }
2016 
2017 static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2018 {
2019   hypre_ParCSRMatrix *parcsr;
2020   HYPRE_Int           hnz;
2021 
2022   PetscFunctionBegin;
2023   /* retrieve the internal matrix */
2024   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2025   /* call HYPRE API */
2026   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2027   PetscCallExternal(HYPRE_ParCSRMatrixRestoreRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2028   PetscFunctionReturn(PETSC_SUCCESS);
2029 }
2030 
2031 static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2032 {
2033   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2034   PetscInt   i;
2035 
2036   PetscFunctionBegin;
2037   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2038   /* Ignore negative row indices
2039    * And negative column indices should be automatically ignored in hypre
2040    * */
2041   for (i = 0; i < m; i++) {
2042     if (idxm[i] >= 0) {
2043       HYPRE_Int hn = (HYPRE_Int)n;
2044       PetscCallExternal(HYPRE_IJMatrixGetValues, hA->ij, 1, &hn, (HYPRE_BigInt *)&idxm[i], (HYPRE_BigInt *)idxn, (HYPRE_Complex *)(v + i * n));
2045     }
2046   }
2047   PetscFunctionReturn(PETSC_SUCCESS);
2048 }
2049 
2050 static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2051 {
2052   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2053 
2054   PetscFunctionBegin;
2055   switch (op) {
2056   case MAT_NO_OFF_PROC_ENTRIES:
2057     if (flg) PetscCallExternal(HYPRE_IJMatrixSetMaxOffProcElmts, hA->ij, 0);
2058     break;
2059   case MAT_IGNORE_OFF_PROC_ENTRIES:
2060     hA->donotstash = flg;
2061     break;
2062   default:
2063     break;
2064   }
2065   PetscFunctionReturn(PETSC_SUCCESS);
2066 }
2067 
/*
   Viewer for MATHYPRE. Non-native formats wrap the ParCSR data in a
   temporary AIJ matrix (no copy) and reuse its viewer; the native format
   writes hypre's own per-rank text files via HYPRE_IJMatrixPrint.
*/
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(view, &format));
  /* info-style formats print nothing for this type */
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
  if (format != PETSC_VIEWER_NATIVE) {
    Mat                 B;
    hypre_ParCSRMatrix *parcsr;
    PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;

    PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
    /* PETSC_USE_POINTER: B aliases the hypre data, nothing is copied */
    PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
    PetscCall(MatGetOperation(B, MATOP_VIEW, (void (**)(void)) & mview));
    PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
    PetscCall((*mview)(B, view));
    PetscCall(MatDestroy(&B));
  } else {
    Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char *filename;

    /* HYPRE uses only text files */
    PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
    PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
    PetscCall(PetscViewerFileGetName(view, &filename));
    /* hypre writes one file per rank, suffixed .00000, .00001, ... */
    PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename);
    PetscCallMPI(MPI_Comm_size(hA->comm, &size));
    if (size > 1) {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
    } else {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2106 
2107 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2108 {
2109   hypre_ParCSRMatrix *acsr, *bcsr;
2110 
2111   PetscFunctionBegin;
2112   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2113     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2114     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2115     PetscCallExternal(hypre_ParCSRMatrixCopy, acsr, bcsr, 1);
2116     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2117     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2118     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2119   } else {
2120     PetscCall(MatCopy_Basic(A, B, str));
2121   }
2122   PetscFunctionReturn(PETSC_SUCCESS);
2123 }
2124 
/*
   Extracts the diagonal of a MATHYPRE matrix into d. Requires congruent
   row/column layouts and (in debug) that no diagonal entries are missing.
*/
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  HYPRE_Complex      *data = NULL;
  HYPRE_Int          *diag = NULL;
  PetscInt            i;
  PetscBool           cong;

  PetscFunctionBegin;
  PetscCall(MatHasCongruentLayouts(A, &cong));
  PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
  if (PetscDefined(USE_DEBUG)) {
    PetscBool miss;
    PetscCall(MatMissingDiagonal(A, &miss, NULL));
    PetscCheck(!miss || !A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented when diagonal entries are missing");
  }
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
    PetscCall(VecGetArray(d, (PetscScalar **)&a));
    diag = hypre_CSRMatrixI(dmat);
    data = hypre_CSRMatrixData(dmat);
    /* data[diag[i]] is the FIRST stored entry of row i;
       NOTE(review): assumes hypre keeps the diagonal entry first in each
       row of the diag block — confirm against the hypre version in use */
    for (i = 0; i < A->rmap->n; i++) a[i] = data[diag[i]];
    PetscCall(VecRestoreArray(d, (PetscScalar **)&a));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2155 
2156 #include <petscblaslapack.h>
2157 
2158 static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2159 {
2160   PetscFunctionBegin;
2161 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2162   {
2163     Mat                 B;
2164     hypre_ParCSRMatrix *x, *y, *z;
2165 
2166     PetscCall(MatHYPREGetParCSR(Y, &y));
2167     PetscCall(MatHYPREGetParCSR(X, &x));
2168     PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, 1.0, x, &z);
2169     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2170     PetscCall(MatHeaderMerge(Y, &B));
2171   }
2172 #else
2173   if (str == SAME_NONZERO_PATTERN) {
2174     hypre_ParCSRMatrix *x, *y;
2175     hypre_CSRMatrix    *xloc, *yloc;
2176     PetscInt            xnnz, ynnz;
2177     HYPRE_Complex      *xarr, *yarr;
2178     PetscBLASInt        one = 1, bnz;
2179 
2180     PetscCall(MatHYPREGetParCSR(Y, &y));
2181     PetscCall(MatHYPREGetParCSR(X, &x));
2182 
2183     /* diagonal block */
2184     xloc = hypre_ParCSRMatrixDiag(x);
2185     yloc = hypre_ParCSRMatrixDiag(y);
2186     xnnz = 0;
2187     ynnz = 0;
2188     xarr = NULL;
2189     yarr = NULL;
2190     if (xloc) {
2191       xarr = hypre_CSRMatrixData(xloc);
2192       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2193     }
2194     if (yloc) {
2195       yarr = hypre_CSRMatrixData(yloc);
2196       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2197     }
2198     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2199     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2200     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2201 
2202     /* off-diagonal block */
2203     xloc = hypre_ParCSRMatrixOffd(x);
2204     yloc = hypre_ParCSRMatrixOffd(y);
2205     xnnz = 0;
2206     ynnz = 0;
2207     xarr = NULL;
2208     yarr = NULL;
2209     if (xloc) {
2210       xarr = hypre_CSRMatrixData(xloc);
2211       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2212     }
2213     if (yloc) {
2214       yarr = hypre_CSRMatrixData(yloc);
2215       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2216     }
2217     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2218     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2219     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2220   } else if (str == SUBSET_NONZERO_PATTERN) {
2221     PetscCall(MatAXPY_Basic(Y, a, X, str));
2222   } else {
2223     Mat B;
2224 
2225     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2226     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2227     PetscCall(MatHeaderReplace(Y, &B));
2228   }
2229 #endif
2230   PetscFunctionReturn(PETSC_SUCCESS);
2231 }
2232 
2233 /* Attach cooMat to hypre matrix mat. The two matrices will share the same data array */
2234 static PetscErrorCode MatAttachCOOMat_HYPRE(Mat mat, Mat cooMat)
2235 {
2236   Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
2237   hypre_CSRMatrix     *diag, *offd;
2238   hypre_ParCSRMatrix  *parCSR;
2239   HYPRE_MemoryLocation hypreMemtype = HYPRE_MEMORY_HOST;
2240   PetscMemType         petscMemtype;
2241   Mat                  A, B;
2242   PetscScalar         *Aa, *Ba;
2243   PetscMPIInt          size;
2244   MPI_Comm             comm;
2245   PetscLayout          rmap;
2246 
2247   PetscFunctionBegin;
2248   PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
2249   PetscCallMPI(MPI_Comm_size(comm, &size));
2250   PetscCall(MatGetLayouts(mat, &rmap, NULL));
2251 
2252   /* Alias cooMat's data array to IJMatrix's */
2253   PetscCallExternal(HYPRE_IJMatrixGetObject, hmat->ij, (void **)&parCSR);
2254   diag = hypre_ParCSRMatrixDiag(parCSR);
2255   offd = hypre_ParCSRMatrixOffd(parCSR);
2256 
2257   hypreMemtype = hypre_CSRMatrixMemoryLocation(diag);
2258   A            = (size == 1) ? cooMat : ((Mat_MPIAIJ *)cooMat->data)->A;
2259   PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &Aa, &petscMemtype));
2260   PetscAssert((PetscMemTypeHost(petscMemtype) && hypreMemtype == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(petscMemtype) && hypreMemtype == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
2261 
2262   hmat->diagJ = hypre_CSRMatrixJ(diag);
2263   PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hypreMemtype));
2264   hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)Aa;
2265   hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */
2266 
2267   if (size > 1) {
2268     B = ((Mat_MPIAIJ *)cooMat->data)->B;
2269     PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &Ba, &petscMemtype));
2270     hmat->offdJ = hypre_CSRMatrixJ(offd);
2271     PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hypreMemtype));
2272     hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)Ba;
2273     hypre_CSRMatrixOwnsData(offd) = 0;
2274   }
2275 
2276   /* Record cooMat for use in MatSetValuesCOO_HYPRE */
2277   hmat->cooMat  = cooMat;
2278   hmat->memType = hypreMemtype;
2279   PetscFunctionReturn(PETSC_SUCCESS);
2280 }
2281 
2282 static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
2283 {
2284   hypre_ParCSRMatrix *parcsr = NULL;
2285   PetscCopyMode       cpmode;
2286   Mat_HYPRE          *hA;
2287   Mat                 cooMat;
2288 
2289   PetscFunctionBegin;
2290   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2291   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2292     parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
2293     cpmode = PETSC_OWN_POINTER;
2294   } else {
2295     cpmode = PETSC_COPY_VALUES;
2296   }
2297   PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
2298   hA = (Mat_HYPRE *)A->data;
2299   if (hA->cooMat) {
2300     op = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
2301     /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
2302     PetscCall(MatDuplicate(hA->cooMat, op, &cooMat));
2303     PetscCall(PetscObjectSetName((PetscObject)cooMat, "_internal_COO_mat_for_hypre"));
2304     PetscCall(MatAttachCOOMat_HYPRE(*B, cooMat));
2305   }
2306   PetscFunctionReturn(PETSC_SUCCESS);
2307 }
2308 
/* Set up COO assembly for a MATHYPRE matrix.

   Builds an AIJ "agent" matrix (cooMat) that performs the actual COO preallocation,
   copies its sparsity pattern into the hypre IJMatrix, and finally attaches cooMat
   to mat so the two share value arrays (see MatAttachCOOMat_HYPRE). */
static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
{
  MPI_Comm    comm;
  PetscMPIInt size;
  PetscLayout rmap, cmap;
  Mat_HYPRE  *hmat;
  Mat         cooMat;           /* agent matrix that does the COO work on mat's behalf */
  MatType     matType = MATAIJ; /* default type of cooMat */

  PetscFunctionBegin;
  /* Build an agent matrix cooMat with AIJ format
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  PetscCall(MatGetLayouts(mat, &rmap, &cmap));

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
  #if defined(HYPRE_USING_HIP)
    matType = MATAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    matType = MATAIJCUSPARSE;
  #else
    SETERRQ(comm, PETSC_ERR_SUP, "Do not know the HYPRE device");
  #endif
  }
#endif

  /* Do COO preallocation through cooMat */
  hmat = (Mat_HYPRE *)mat->data;
  if (hmat->cooMat) {
    /* A previous COO setup exists: destroy the old delegate and free the index/value
       arrays whose ownership was taken from hypre when it was attached */
    PetscCall(MatDestroy(&hmat->cooMat));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hmat->diagJ, hmat->memType));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hmat->offdJ, hmat->memType));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hmat->diag, hmat->memType));
  }
  PetscCall(MatCreate(comm, &cooMat));
  PetscCall(MatSetType(cooMat, matType));
  PetscCall(MatSetLayouts(cooMat, rmap, cmap)); /* cooMat shares mat's layouts */
  PetscCall(MatSetOption(cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
  PetscCall(MatSetOption(cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));

  /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ uses this specific
     name to automatically put the diagonal entries first */
  PetscCall(PetscObjectSetName((PetscObject)cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(MatSetPreallocationCOO(cooMat, coo_n, coo_i, coo_j));
  cooMat->assembled = PETSC_TRUE;

  /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
  PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
  PetscCall(MatHYPRE_CreateFromMat(cooMat, hmat));      /* Create hmat->ij and preallocate it */
  PetscCall(MatHYPRE_IJMatrixCopyIJ(cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */

  mat->preallocated = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */

  /* Attach cooMat to mat */
  PetscCall(MatAttachCOOMat_HYPRE(mat, cooMat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2373 
2374 static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
2375 {
2376   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2377 
2378   PetscFunctionBegin;
2379   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
2380   PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
2381   PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
2382   PetscFunctionReturn(PETSC_SUCCESS);
2383 }
2384 
2385 /*MC
2386    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2387           based on the hypre IJ interface.
2388 
2389    Level: intermediate
2390 
.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation()`
2392 M*/
2393 
/* Constructor registered for MATHYPRE: allocates the type-specific data, fills the
   Mat operation table, and composes the conversion/product/COO methods for this type */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE *hB;

  PetscFunctionBegin;
  PetscCall(PetscNew(&hB));

  /* by default this object owns its hypre data and may hand out its array */
  hB->inner_free      = PETSC_TRUE;
  hB->array_available = PETSC_TRUE;

  B->data = (void *)hB;

  /* start from an empty op table, then install the MATHYPRE implementations */
  PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device-capable hypre: matrices start unbound (on device) and may be bound to CPU later */
  B->ops->bindtocpu = MatBindToCPU_HYPRE;
  B->boundtocpu     = PETSC_FALSE;
#endif

  /* build cache for off array entries formed */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

  /* private communicator for hypre calls, plus the composed type-specific methods */
  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* ensure the matching PETSc device backend is up and pick the matching vector type */
  #if defined(HYPRE_USING_HIP)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  PetscCall(MatSetVecType(B, VECHIP));
  #endif
  #if defined(HYPRE_USING_CUDA)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  PetscCall(MatSetVecType(B, VECCUDA));
  #endif
#endif
  PetscHYPREInitialize();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2462 
2463 static PetscErrorCode hypre_array_destroy(void *ptr)
2464 {
2465   PetscFunctionBegin;
2466   if (ptr) hypre_TFree(ptr, HYPRE_MEMORY_HOST);
2467   PetscFunctionReturn(PETSC_SUCCESS);
2468 }
2469