xref: /petsc/src/mat/impls/hypre/mhypre.c (revision e91c04dfc8a52dee1965211bb1cc8e5bf775178f)
/*
    Creates a hypre IJMatrix from a PETSc matrix
*/

#include <petscpkg_version.h>
#include <petsc/private/petschypre.h>
#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/deviceimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>
#include <_hypre_sstruct_ls.h>

#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
#endif

static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);

static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscInt        i, n_d, n_o;
  const PetscInt *ia_d, *ia_o;
  PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
  HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
    if (done_d) {
      PetscCall(PetscMalloc1(n_d, &nnz_d));
      for (i = 0; i < n_d; i++) nnz_d[i] = ia_d[i + 1] - ia_d[i];
    }
    PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
    if (done_o) {
      PetscCall(PetscMalloc1(n_o, &nnz_o));
      for (i = 0; i < n_o; i++) nnz_o[i] = ia_o[i + 1] - ia_o[i];
    }
    PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
  }
  if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part */
      PetscCall(PetscCalloc1(n_d, &nnz_o));
    }
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
    { /* If we don't do this, the columns of the matrix will be all zeros! */
      hypre_AuxParCSRMatrix *aux_matrix;
      aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixDestroy(aux_matrix);
      hypre_IJMatrixTranslator(ij) = NULL;
      PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
      /* it seems they partially fixed it in 2.19.0 */
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  #endif
    }
#else
    PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
#endif
    PetscCall(PetscFree(nnz_d));
    PetscCall(PetscFree(nnz_o));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
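
/* Worked example (illustrative only): on two ranks, for the global 4x4 matrix

     [ 1 2 | . 3 ]
     [ . 4 | 5 . ]     rank 0 owns rows 0-1 and columns 0-1
     [-----+-----]
     [ 6 . | 7 . ]     rank 1 owns rows 2-3 and columns 2-3
     [ . 8 | . 9 ]

   rank 0 computes nnz_d = {2, 1} (entries in its own column block) and
   nnz_o = {1, 1} (entries in other ranks' columns), which is exactly the pair
   of per-row arrays HYPRE_IJMatrixSetDiagOffdSizes() expects above. */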

static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscInt rstart, rend, cstart, cend;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscHYPREInitialize();
  if (hA->ij) {
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
  }
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
  {
    PetscBool       same;
    Mat             A_d, A_o;
    const PetscInt *colmap;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
    if (same) {
      PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
    if (same) {
      PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscBool flg;

  PetscFunctionBegin;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  PetscCallExternal(HYPRE_IJMatrixInitialize, ij);
#else
  PetscCallExternal(HYPRE_IJMatrixInitialize_v2, ij, HYPRE_MEMORY_HOST);
#endif
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
  HYPRE_Int              type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the hack part, where we modify the hypre data structures directly
  */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
    PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
  } else {
    PetscInt i;

    for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  for (i = 0; i < pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)poffd->i[i];
  }

  jj = (PetscInt *)hoffd->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  PetscCallExternal(hypre_CSRMatrixBigInitialize, hoffd);
  jj = (PetscInt *)hoffd->big_j;
#endif
  pjj = poffd->j;
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];

  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
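
/* Worked example (illustrative only): if rank 0's off-diagonal block has
   entries in global columns 3 (row 0) and 2 (row 1), PETSc stores the
   compressed local indices poffd->j = {1, 0} together with garray = {2, 3};
   the loop above expands them back to the global indices jj = {3, 2} that
   hypre keeps in its (big_)j array. */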

static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)A->data;
  Mat                    lA;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;
  MatType                lmattype   = NULL;
  PetscBool              freeparcsr = PETSC_FALSE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, mhA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, mhA->ij, (void **)&hA);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(mhA->ij)) {
    /* Supported by copying the matrix back to the host and then copying the
       result to the GPU. Somewhat inefficient, but the best we can do now */
  #if defined(HYPRE_USING_HIP)
    lmattype = MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    lmattype = MATSEQAIJCUSPARSE;
  #endif
    hA         = hypre_ParCSRMatrixClone_v2(hA, 1, HYPRE_MEMORY_HOST);
    freeparcsr = PETSC_TRUE;
  }
#endif
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscCall(PetscMalloc1(dc + oc, &aux));
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    PetscInt  nr;
    PetscBool done;
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArrayWrite(lA, &data));
  }
  /* merge local matrices */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    PetscScalar *aold = (PetscScalar *)aa;
    PetscInt    *jold = jj, nc = jd + jo;
    for (; jd < *hdi; jd++) {
      *jj++ = *hdj++;
      *aa++ = *hdd++;
    }
    for (; jo < *hoi; jo++) {
      *jj++ = *hoj++ + dc;
      *aa++ = *hod++;
    }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
  }
  for (; cum < dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ *a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
    /* hack SeqAIJ */
    a          = (Mat_SeqAIJ *)lA->data;
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    if (lmattype) PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
    PetscCall(MatISSetLocalMat(*B, lA));
    PetscCall(MatDestroy(&lA));
  } else {
    PetscCall(MatSeqAIJRestoreArrayWrite(lA, &data));
  }
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
  if (freeparcsr) PetscCallExternal(hypre_ParCSRMatrixDestroy, hA);
  PetscFunctionReturn(PETSC_SUCCESS);
}
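
/* Usage sketch (illustrative, error checking omitted): this routine is normally
   reached through the generic MatConvert() interface, e.g.

     Mat A;   // assembled MATHYPRE matrix
     Mat B;
     MatConvert(A, MATIS, MAT_INITIAL_MATRIX, &B);
     ...
     MatDestroy(&B);
*/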

static PetscErrorCode MatHYPRE_DestroyCOOMat(Mat mat)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)mat->data;

  PetscFunctionBegin;
  if (hA->cooMat) { /* If cooMat is present we need to destroy the column indices */
    PetscCall(MatDestroy(&hA->cooMat));
    if (hA->cooMatAttached) {
      hypre_CSRMatrix     *csr;
      hypre_ParCSRMatrix  *parcsr;
      HYPRE_MemoryLocation mem;

      PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
      csr = hypre_ParCSRMatrixDiag(parcsr);
      if (csr) {
        mem = hypre_CSRMatrixMemoryLocation(csr);
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
      }
      csr = hypre_ParCSRMatrixOffd(parcsr);
      if (csr) {
        mem = hypre_CSRMatrixMemoryLocation(csr);
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
      }
    }
  }
  hA->cooMatAttached = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_CreateCOOMat(Mat mat)
{
  MPI_Comm    comm;
  PetscMPIInt size;
  PetscLayout rmap, cmap;
  Mat_HYPRE  *hmat    = (Mat_HYPRE *)mat->data;
  MatType     matType = MATAIJ; /* default type of cooMat */

  PetscFunctionBegin;
  /* Build an agent matrix cooMat in AIJ format. It has the same sparsity pattern
     as mat and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  PetscCall(MatGetLayouts(mat, &rmap, &cmap));

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
  #if defined(HYPRE_USING_HIP)
    matType = MATAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    matType = MATAIJCUSPARSE;
  #else
    SETERRQ(comm, PETSC_ERR_SUP, "Do not know the HYPRE device");
  #endif
  }
#endif

  /* Do COO preallocation through cooMat */
  PetscCall(MatHYPRE_DestroyCOOMat(mat));
  PetscCall(MatCreate(comm, &hmat->cooMat));
  PetscCall(MatSetType(hmat->cooMat, matType));
  PetscCall(MatSetLayouts(hmat->cooMat, rmap, cmap));

  /* allocate local matrices if needed */
  PetscCall(MatMPIAIJSetPreallocation(hmat->cooMat, 0, NULL, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
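
/* Usage sketch (illustrative, error checking omitted): cooMat is driven through
   the standard two-phase PETSc COO interface, just like any AIJ matrix:

     PetscInt    coo_i[] = {0, 0, 1};        // row index of each nonzero
     PetscInt    coo_j[] = {0, 1, 1};        // column index of each nonzero
     PetscScalar v[]     = {1.0, 2.0, 3.0};  // one value per (i,j) pair
     MatSetPreallocationCOO(mat, 3, coo_i, coo_j); // symbolic phase, done once
     MatSetValuesCOO(mat, v, INSERT_VALUES);       // numeric phase, repeatable
*/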

/* Attach cooMat's data array to the hypre matrix.
   When AIJCUPMSPARSE supports raw device pointers instead of THRUSTARRAY,
   we should swap the arrays instead: i.e., attach the hypre matrix array to cooMat.
   This is because hypre should be in charge of handling the memory;
   cooMat is only a way to reuse the PETSc COO code.
   Attaching the memory will then be done at MatSetValuesCOO time, and it will
   dynamically support the hypre matrix migrating to the host.
*/
static PetscErrorCode MatHYPRE_AttachCOOMat(Mat mat)
{
  Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
  hypre_CSRMatrix     *diag, *offd;
  hypre_ParCSRMatrix  *parCSR;
  HYPRE_MemoryLocation hmem = HYPRE_MEMORY_HOST;
  PetscMemType         pmem;
  Mat                  A, B;
  PetscScalar         *a;
  PetscMPIInt          size;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  if (hmat->cooMatAttached) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(hmat->cooMat->preallocated, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix is not preallocated");
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* Alias cooMat's data array to IJMatrix's */
  PetscCallExternal(HYPRE_IJMatrixGetObject, hmat->ij, (void **)&parCSR);
  diag = hypre_ParCSRMatrixDiag(parCSR);
  offd = hypre_ParCSRMatrixOffd(parCSR);

  A = (size == 1) ? hmat->cooMat : ((Mat_MPIAIJ *)hmat->cooMat->data)->A;
  B = (size == 1) ? NULL : ((Mat_MPIAIJ *)hmat->cooMat->data)->B;

  PetscCall(PetscObjectSetName((PetscObject)A, "_internal_COO_mat_for_hypre"));
  hmem = hypre_CSRMatrixMemoryLocation(diag);
  PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &a, &pmem));
  PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
  PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hmem));
  hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)a;
  hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */

  if (B) {
    hmem = hypre_CSRMatrixMemoryLocation(offd);
    PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &a, &pmem));
    PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hmem));
    hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)a;
    hypre_CSRMatrixOwnsData(offd) = 0;
  }
  hmat->cooMatAttached = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build COO's coordinate list i[], j[] based on CSR's i[], j[] arrays and the number of local rows 'n'
static PetscErrorCode CSRtoCOO_Private(PetscInt n, const PetscInt ii[], const PetscInt jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt *cooi, *cooj;

  PetscFunctionBegin;
  *ncoo = ii[n];
  PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
  for (PetscInt i = 0; i < n; i++) {
    for (PetscInt j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
  }
  PetscCall(PetscArraycpy(cooj, jj, *ncoo));
  *coo_i = cooi;
  *coo_j = cooj;
  PetscFunctionReturn(PETSC_SUCCESS);
}
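
/* Worked example (illustrative only): for n = 2, ii = {0, 2, 3}, jj = {0, 1, 1}
   (row 0 holds columns 0 and 1, row 1 holds column 1) the routine returns
   ncoo = 3, coo_i = {0, 0, 1} and coo_j = {0, 1, 1}. */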

// Similar to CSRtoCOO_Private, but the CSR's i[], j[] are of type HYPRE_Int
static PetscErrorCode CSRtoCOO_HYPRE_Int_Private(PetscInt n, const HYPRE_Int ii[], const HYPRE_Int jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt *cooi, *cooj;

  PetscFunctionBegin;
  *ncoo = ii[n];
  PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
  for (PetscInt i = 0; i < n; i++) {
    for (HYPRE_Int j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
  }
  for (PetscCount i = 0; i < *ncoo; i++) cooj[i] = jj[i];
  *coo_i = cooi;
  *coo_j = cooj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build a COO data structure for the seqaij matrix, as if the nonzeros were laid out in the same order as in the CSR
static PetscErrorCode MatSeqAIJGetCOO_Private(Mat A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt        n;
  const PetscInt *ii, *jj;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatGetRowIJ");
  PetscCall(CSRtoCOO_Private(n, ii, jj, ncoo, coo_i, coo_j));
  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatRestoreRowIJ");
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build a COO data structure for the hypre_CSRMatrix, as if the nonzeros were laid out in the same order as in the hypre_CSRMatrix
static PetscErrorCode hypreCSRMatrixGetCOO_Private(hypre_CSRMatrix *A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt             n = hypre_CSRMatrixNumRows(A);
  HYPRE_Int           *ii, *jj;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(A);
  if (mem != HYPRE_MEMORY_HOST) {
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(A);
    PetscCall(PetscMalloc2(n + 1, &ii, nnz, &jj));
    hypre_TMemcpy(ii, hypre_CSRMatrixI(A), HYPRE_Int, n + 1, HYPRE_MEMORY_HOST, mem);
    hypre_TMemcpy(jj, hypre_CSRMatrixJ(A), HYPRE_Int, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
#else
  {
#endif
    ii = hypre_CSRMatrixI(A);
    jj = hypre_CSRMatrixJ(A);
  }
  PetscCall(CSRtoCOO_HYPRE_Int_Private(n, ii, jj, ncoo, coo_i, coo_j));
  if (mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree2(ii, jj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetValuesCOOFromCSRMatrix_Private(Mat A, hypre_CSRMatrix *H)
{
  PetscBool            iscpu = PETSC_TRUE;
  PetscScalar         *a;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(H);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &iscpu));
#endif
  if (iscpu && mem != HYPRE_MEMORY_HOST) {
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(H);
    PetscCall(PetscMalloc1(nnz, &a));
    hypre_TMemcpy(a, hypre_CSRMatrixData(H), PetscScalar, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
    a = (PetscScalar *)hypre_CSRMatrixData(H);
  }
  PetscCall(MatSetValuesCOO(A, a, INSERT_VALUES));
  if (iscpu && mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree(a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  MPI_Comm   comm = PetscObjectComm((PetscObject)A);
  Mat        M = NULL, dH = NULL, oH = NULL, dA = NULL, oA = NULL;
  PetscBool  ismpiaij, issbaij, isbaij;
  Mat_HYPRE *hA;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
  if (isbaij || issbaij) { /* handle BAIJ and SBAIJ */
    PetscBool ismpi;
    MatType   newtype;

    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
    newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
    if (reuse == MAT_REUSE_MATRIX) {
      PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
      PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else {
      PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  dA = A;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));

  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, MATHYPRE));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));

    hA = (Mat_HYPRE *)M->data;
    PetscCall(MatHYPRE_CreateFromMat(A, hA));
    PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij));

    PetscCall(MatHYPRE_CreateCOOMat(M));

    dH = hA->cooMat;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

    PetscCall(PetscObjectSetName((PetscObject)dH, "_internal_COO_mat_for_hypre"));
    PetscCall(MatSeqAIJGetCOO_Private(dA, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dH, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (oH) {
      PetscCall(PetscLayoutDestroy(&oH->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oH), oA->cmap->n, oA->cmap->n, 1, &oH->cmap));
      PetscCall(MatSeqAIJGetCOO_Private(oA, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oH, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));
    }
    hA->cooMat->assembled = PETSC_TRUE;

    M->preallocated = PETSC_TRUE;
    PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));

    PetscCall(MatHYPRE_AttachCOOMat(M));
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  hA = (Mat_HYPRE *)M->data;
  PetscCheck(hA->cooMat, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");

  dH = hA->cooMat;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

  PetscScalar *a;
  PetscCall(MatSeqAIJGetCSRAndMemType(dA, NULL, NULL, &a, NULL));
  PetscCall(MatSetValuesCOO(dH, a, INSERT_VALUES));
  if (oH) {
    PetscCall(MatSeqAIJGetCSRAndMemType(oA, NULL, NULL, &a, NULL));
    PetscCall(MatSetValuesCOO(oH, a, INSERT_VALUES));
  }

  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}
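
/* Usage sketch (illustrative, error checking omitted): the usual entry point is
   MatConvert(); the MAT_REUSE_MATRIX path above makes the numeric update cheap
   when the sparsity pattern of A is unchanged:

     Mat A;   // assembled (MPI)AIJ matrix
     Mat B;
     MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &B);
     ...      // change values of A, keeping the same pattern
     MatConvert(A, MATHYPRE, MAT_REUSE_MATRIX, &B);
     MatDestroy(&B);
*/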

static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat                 M, dA = NULL, oA = NULL;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dH, *oH;
  MPI_Comm            comm;
  PetscBool           ismpiaij, isseqaij;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ base types are supported");
  }
  PetscCall(MatHYPREGetParCSR(A, &parcsr));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_ParCSRMatrixMemoryLocation(parcsr)) {
    PetscBool isaij;

    PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
    if (isaij) {
      PetscMPIInt size;

      PetscCallMPI(MPI_Comm_size(comm, &size));
  #if defined(HYPRE_USING_HIP)
      mtype = size > 1 ? MATMPIAIJHIPSPARSE : MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
      mtype = size > 1 ? MATMPIAIJCUSPARSE : MATSEQAIJCUSPARSE;
  #else
      mtype = size > 1 ? MATMPIAIJ : MATSEQAIJ;
  #endif
    }
  }
#endif
  dH = hypre_ParCSRMatrixDiag(parcsr);
  oH = hypre_ParCSRMatrixOffd(parcsr);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, mtype));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatMPIAIJSetPreallocation(M, 0, NULL, 0, NULL));

    dA = M;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));

    PetscCall(hypreCSRMatrixGetCOO_Private(dH, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dA, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (ismpiaij) {
      HYPRE_Int nc = hypre_CSRMatrixNumCols(oH);

      PetscCall(PetscLayoutDestroy(&oA->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oA), nc, nc, 1, &oA->cmap));
      PetscCall(hypreCSRMatrixGetCOO_Private(oH, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oA, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));

      /* garray */
      Mat_MPIAIJ   *aij    = (Mat_MPIAIJ *)M->data;
      HYPRE_BigInt *harray = hypre_ParCSRMatrixColMapOffd(parcsr);
      PetscInt     *garray;

      PetscCall(PetscFree(aij->garray));
      PetscCall(PetscMalloc1(nc, &garray));
      for (HYPRE_Int i = 0; i < nc; i++) garray[i] = (PetscInt)harray[i];
      aij->garray = garray;
      PetscCall(MatSetUpMultiply_MPIAIJ(M));
    }
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  dA = M;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
  PetscCall(MatSetValuesCOOFromCSRMatrix_Private(dA, dH));
  if (oA) PetscCall(MatSetValuesCOOFromCSRMatrix_Private(oA, oH));
  M->assembled = PETSC_TRUE;
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag, *hoffd;
  Mat_SeqAIJ         *diag, *offd;
  PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ismpiaij, isseqaij;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
  PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
  PetscBool           iscuda, iship;
#if defined(PETSC_HAVE_DEVICE) && defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool boundtocpu = A->boundtocpu;
#else
  PetscBool boundtocpu = PETSC_TRUE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
  PetscHYPREInitialize();
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;

    diag = (Mat_SeqAIJ *)a->A->data;
    offd = (Mat_SeqAIJ *)a->B->data;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      poi        = offd->i;
      poj        = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
        hoi = (HYPRE_Int *)poi;
        hoj = (HYPRE_Int *)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag = (Mat_SeqAIJ *)A->data;
    offd = NULL;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    PetscCallMPI(MPI_Comm_rank(comm, &myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, noffd, dnnz, onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
  hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
    for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)pdi[i];
    for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)pdj[i];
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag, 0);

  /* set off-diagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
      for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)poi[i];
      for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)poj[i];
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd, 0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, !boundtocpu ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
#else
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  PetscCallExternal(hypre_ParCSRMatrixInitialize, tA);
  #else
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, HYPRE_MEMORY_HOST);
  #endif
#endif
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallExternal(hypre_MatvecCommPkgCreate, tA);
  *hA = tA;
  PetscFunctionReturn(PETSC_SUCCESS);
}
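
/* Usage sketch (illustrative): MatAIJGetParCSR_Private() wraps the PETSc CSR
   arrays in a temporary hypre_ParCSRMatrix without copying the values; every
   call must be paired with MatAIJRestoreParCSR_Private() (below), which NULLs
   the borrowed pointers before destroying the wrapper:

     hypre_ParCSRMatrix *hA;
     MatAIJGetParCSR_Private(A, &hA);
     ...   // use hA with hypre routines
     MatAIJRestoreParCSR_Private(A, &hA);   // hA is set back to NULL
*/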

static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  if (iscuda) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallExternal(hypre_BoomerAMGBuildCoarseOperator, hR, hA, hP, hRAP);
  PetscCallExternal(hypre_ParCSRMatrixSetNumNonzeros, *hRAP);
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(P, &hP));
  PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
  PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));

  PetscCall(MatHeaderMerge(C, &B));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
  PetscFunctionReturn(PETSC_SUCCESS);
}
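
/* Usage sketch (illustrative; assumes "hypre" is the registered MatProduct
   algorithm name for this kernel): with AIJ operands the hypre RAP can be
   requested through the MatProduct framework:

     Mat C;
     MatProductCreate(A, P, NULL, &C);
     MatProductSetType(C, MATPRODUCT_PtAP);
     MatProductSetAlgorithm(C, "hypre");
     MatProductSetFromOptions(C);
     MatProductSymbolic(C);
     MatProductNumeric(C);
*/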

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
  HYPRE_Int           type;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);

  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));

  /* create temporary matrix and merge to C */
  PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
  Mat_HYPRE          *hA, *hP;
  PetscBool           ishypre;
  HYPRE_Int           type;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
  PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls hypre_ParMatmul
   hypre_ParMatmul uses hypre_ParCSRMatrixCreate with the communicator of hA
   hypre_ParCSRMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
  PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));

  PetscCall(MatHeaderMerge(C, &D));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
  Mat_HYPRE          *hA, *hB;
  PetscBool           ishypre;
  HYPRE_Int           type;
  Mat_Product        *product;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hB = (Mat_HYPRE *)B->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hB->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hB->ij, (void **)&Bparcsr);
  PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
  PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

  /* need to use HeaderReplace because HeaderMerge messes up the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  PetscCall(MatHeaderReplace(C, &D));
  C->product             = product;
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}
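
/* Usage sketch (illustrative, error checking omitted): with both operands of
   type MATHYPRE the product is computed by hypre and returned as MATHYPRE:

     Mat A, B, C;   // A and B assembled as MATHYPRE
     MatMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C);
     MatDestroy(&C);
*/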

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
{
  Mat                 E;
  hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatAIJGetParCSR_Private(C, &hC));
  PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
  PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
  PetscCall(MatHeaderMerge(D, &E));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    flg;
  PetscInt     type        = 0;
  const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
  PetscInt     ntype       = 4;
  Mat          A           = product->A;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  } else {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  }

  if (type == 0 || type == 1 || type == 2) {
    PetscCall(MatSetType(C, MATAIJ));
  } else if (type == 3) {
    PetscCall(MatSetType(C, MATHYPRE));
  } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
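
/* Example (command line): when A is AIJ and P is MATHYPRE, the storage format
   of C = PtAP can be chosen at runtime with the options parsed above, e.g.

     -matptap_hypre_outtype hypre                  (MatPtAP() interface)
     -mat_product_algorithm_hypre_outtype hypre    (MatProduct interface)
*/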
1270 
1271 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1272 {
1273   Mat_Product *product = C->product;
1274 
1275   PetscFunctionBegin;
1276   switch (product->type) {
1277   case MATPRODUCT_AB:
1278     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1279     break;
1280   case MATPRODUCT_PtAP:
1281     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1282     break;
1283   default:
1284     break;
1285   }
1286   PetscFunctionReturn(PETSC_SUCCESS);
1287 }
1288 
1289 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1290 {
1291   PetscFunctionBegin;
1292   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
1293   PetscFunctionReturn(PETSC_SUCCESS);
1294 }
1295 
1296 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1297 {
1298   PetscFunctionBegin;
1299   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
1300   PetscFunctionReturn(PETSC_SUCCESS);
1301 }
1302 
1303 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1304 {
1305   PetscFunctionBegin;
1306   if (y != z) PetscCall(VecCopy(y, z));
1307   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
1308   PetscFunctionReturn(PETSC_SUCCESS);
1309 }
1310 
1311 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1312 {
1313   PetscFunctionBegin;
1314   if (y != z) PetscCall(VecCopy(y, z));
1315   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
1316   PetscFunctionReturn(PETSC_SUCCESS);
1317 }
1318 
1319 /* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
1320 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
1321 {
1322   Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
1323   hypre_ParCSRMatrix *parcsr;
1324   hypre_ParVector    *hx, *hy;
1325 
1326   PetscFunctionBegin;
1327   if (trans) {
1328     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
1329     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
1330     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
1331     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hx);
1332     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hy);
1333   } else {
1334     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
1335     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
1336     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
1337     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hx);
1338     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hy);
1339   }
1340   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1341   if (trans) {
1342     PetscCallExternal(hypre_ParCSRMatrixMatvecT, a, parcsr, hx, b, hy);
1343   } else {
1344     PetscCallExternal(hypre_ParCSRMatrixMatvec, a, parcsr, hx, b, hy);
1345   }
1346   PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
1347   PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
1348   PetscFunctionReturn(PETSC_SUCCESS);
1349 }
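/* A minimal usage sketch (error checking omitted): with an assembled MATHYPRE matrix A,
   the standard PETSc calls below dispatch to the kernel above and hence to hypre's
   hypre_ParCSRMatrixMatvec/hypre_ParCSRMatrixMatvecT.

     Vec x, y;
     MatCreateVecs(A, &x, &y);            x in the column space, y in the row space
     VecSet(x, 1.0);
     MatMult(A, x, y);                    y = A*x
     MatMultTransposeAdd(A, y, x, x);     x = A^T*y + x
*/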
1350 
1351 static PetscErrorCode MatDestroy_HYPRE(Mat A)
1352 {
1353   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1354 
1355   PetscFunctionBegin;
1356   PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
1357   PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
1358   PetscCall(MatHYPRE_DestroyCOOMat(A)); /* must be called before destroying the individual CSR */
1359   if (hA->ij) {
1360     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
1361     PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
1362   }
1363   if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));
1364 
1365   PetscCall(MatStashDestroy_Private(&A->stash));
1366   PetscCall(PetscFree(hA->array));
1367   if (hA->rows_d) PetscStackCallExternalVoid("hypre_Free", hypre_Free(hA->rows_d, HYPRE_MEMORY_DEVICE));
1368 
1369   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
1370   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
1371   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
1372   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
1373   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", NULL));
1374   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", NULL));
1375   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_hypre_C", NULL));
1376   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", NULL));
1377   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
1378   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
1379   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
1380   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
1381   PetscCall(PetscFree(A->data));
1382   PetscFunctionReturn(PETSC_SUCCESS);
1383 }
1384 
1385 static PetscErrorCode MatSetUp_HYPRE(Mat A)
1386 {
1387   PetscFunctionBegin;
1388   if (!A->preallocated) PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
1389   PetscFunctionReturn(PETSC_SUCCESS);
1390 }
1391 
1392 //TODO FIX hypre_CSRMatrixMatvecOutOfPlace
1393 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1394 static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
1395 {
1396   Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
1397   HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;
1398 
1399   PetscFunctionBegin;
1400   A->boundtocpu = bind;
1401   if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
1402     hypre_ParCSRMatrix *parcsr;
1403     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1404     PetscCallExternal(hypre_ParCSRMatrixMigrate, parcsr, hmem);
1405   }
1406   if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
1407   if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
1408   PetscFunctionReturn(PETSC_SUCCESS);
1409 }
1410 #endif
1411 
1412 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1413 {
1414   Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
1415   PetscMPIInt  n;
1416   PetscInt     i, j, rstart, ncols, flg;
1417   PetscInt    *row, *col;
1418   PetscScalar *val;
1419 
1420   PetscFunctionBegin;
1421   PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
1422 
1423   if (!A->nooffprocentries) {
1424     while (1) {
1425       PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
1426       if (!flg) break;
1427 
1428       for (i = 0; i < n;) {
1429         /* Now identify the consecutive values belonging to the same row */
1430         for (j = i, rstart = row[j]; j < n; j++) {
1431           if (row[j] != rstart) break;
1432         }
1433         if (j < n) ncols = j - i;
1434         else ncols = n - i;
1435         /* Now assemble all these values with a single function call */
1436         PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));
1437 
1438         i = j;
1439       }
1440     }
1441     PetscCall(MatStashScatterEnd_Private(&A->stash));
1442   }
1443 
1444   PetscCallExternal(HYPRE_IJMatrixAssemble, hA->ij);
1445   /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
1446   /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
1447   if (!A->sortedfull) {
1448     hypre_AuxParCSRMatrix *aux_matrix;
1449 
1450     /* call destroy just to make sure we do not leak anything */
1451     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1452     PetscCallExternal(hypre_AuxParCSRMatrixDestroy, aux_matrix);
1453     hypre_IJMatrixTranslator(hA->ij) = NULL;
1454 
1455     /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
1456     PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1457     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1458     if (aux_matrix) {
1459       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
1460 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1461       PetscCallExternal(hypre_AuxParCSRMatrixInitialize, aux_matrix);
1462 #else
1463       PetscCallExternal(hypre_AuxParCSRMatrixInitialize_v2, aux_matrix, HYPRE_MEMORY_HOST);
1464 #endif
1465     }
1466   }
1467   {
1468     hypre_ParCSRMatrix *parcsr;
1469 
1470     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1471     if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallExternal(hypre_MatvecCommPkgCreate, parcsr);
1472   }
1473   if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
1474   if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
1475 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1476   PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
1477 #endif
1478   PetscFunctionReturn(PETSC_SUCCESS);
1479 }
1480 
1481 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1482 {
1483   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1484 
1485   PetscFunctionBegin;
1486   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");
1487 
1488   if (hA->array_size >= size) {
1489     *array = hA->array;
1490   } else {
1491     PetscCall(PetscFree(hA->array));
1492     hA->array_size = size;
1493     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1494     *array = hA->array;
1495   }
1496 
1497   hA->array_available = PETSC_FALSE;
1498   PetscFunctionReturn(PETSC_SUCCESS);
1499 }
1500 
1501 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1502 {
1503   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1504 
1505   PetscFunctionBegin;
1506   *array              = NULL;
1507   hA->array_available = PETSC_TRUE;
1508   PetscFunctionReturn(PETSC_SUCCESS);
1509 }
1510 
1511 static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1512 {
1513   Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
1514   PetscScalar   *vals = (PetscScalar *)v;
1515   HYPRE_Complex *sscr;
1516   PetscInt      *cscr[2];
1517   PetscInt       i, nzc;
1518   PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
1519   void          *array = NULL;
1520 
1521   PetscFunctionBegin;
1522   PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
1523   cscr[0] = (PetscInt *)array;
1524   cscr[1] = ((PetscInt *)array) + nc;
1525   sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
1526   for (i = 0, nzc = 0; i < nc; i++) {
1527     if (cols[i] >= 0) {
1528       cscr[0][nzc]   = cols[i];
1529       cscr[1][nzc++] = i;
1530     }
1531   }
1532   if (!nzc) {
1533     PetscCall(MatRestoreArray_HYPRE(A, &array));
1534     PetscFunctionReturn(PETSC_SUCCESS);
1535   }
1536 
1537 #if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
1538   if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
1539     hypre_ParCSRMatrix *parcsr;
1540 
1541     PetscCallExternal(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
1542     PetscCallExternal(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
1543   }
1544 #endif
1545 
1546   if (ins == ADD_VALUES) {
1547     for (i = 0; i < nr; i++) {
1548       if (rows[i] >= 0) {
1549         PetscInt  j;
1550         HYPRE_Int hnc = (HYPRE_Int)nzc;
1551 
1552         if (!nzc) continue;
1553         /* nonlocal values */
1554         if (rows[i] < rst || rows[i] >= ren) {
1555           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1556           if (hA->donotstash) continue;
1557         }
1558         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1559         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1560         PetscCallExternal(HYPRE_IJMatrixAddToValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1561       }
1562       vals += nc;
1563     }
1564   } else { /* INSERT_VALUES */
1565     for (i = 0; i < nr; i++) {
1566       if (rows[i] >= 0) {
1567         PetscInt  j;
1568         HYPRE_Int hnc = (HYPRE_Int)nzc;
1569 
1570         if (!nzc) continue;
1571         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1572         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1573         /* nonlocal values */
1574         if (rows[i] < rst || rows[i] >= ren) {
1575           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1576           if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
1577         }
1578         /* local values */
1579         else
1580           PetscCallExternal(HYPRE_IJMatrixSetValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1581       }
1582       vals += nc;
1583     }
1584   }
1585 
1586   PetscCall(MatRestoreArray_HYPRE(A, &array));
1587   PetscFunctionReturn(PETSC_SUCCESS);
1588 }
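/* A minimal usage sketch (made-up indices and values; error checking omitted): negative
   row or column indices are skipped, matching the usual MatSetValues() convention.

     PetscInt    rows[2] = {0, 1};
     PetscInt    cols[3] = {0, -1, 2};                    the -1 column is ignored
     PetscScalar vals[6] = {1., 0., 2., 3., 0., 4.};      row-major, 2 x 3 block
     MatSetValues(A, 2, rows, 3, cols, vals, ADD_VALUES);
     MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
*/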
1589 
1590 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1591 {
1592   Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
1593   HYPRE_Int  *hdnnz, *honnz;
1594   PetscInt    i, rs, re, cs, ce, bs;
1595   PetscMPIInt size;
1596 
1597   PetscFunctionBegin;
1598   PetscCall(PetscLayoutSetUp(A->rmap));
1599   PetscCall(PetscLayoutSetUp(A->cmap));
1600   rs = A->rmap->rstart;
1601   re = A->rmap->rend;
1602   cs = A->cmap->rstart;
1603   ce = A->cmap->rend;
1604   if (!hA->ij) {
1605     PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rs, re - 1, cs, ce - 1, &hA->ij);
1606     PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1607   } else {
1608     HYPRE_BigInt hrs, hre, hcs, hce;
1609     PetscCallExternal(HYPRE_IJMatrixGetLocalRange, hA->ij, &hrs, &hre, &hcs, &hce);
1610     PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
1611     PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
1612   }
1613   PetscCall(MatHYPRE_DestroyCOOMat(A));
1614   PetscCall(MatGetBlockSize(A, &bs));
1615   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
1616   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;
1617 
1618   if (!dnnz) {
1619     PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
1620     for (i = 0; i < A->rmap->n; i++) hdnnz[i] = dnz;
1621   } else {
1622     hdnnz = (HYPRE_Int *)dnnz;
1623   }
1624   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1625   if (size > 1) {
1626     hypre_AuxParCSRMatrix *aux_matrix;
1627     if (!onnz) {
1628       PetscCall(PetscMalloc1(A->rmap->n, &honnz));
1629       for (i = 0; i < A->rmap->n; i++) honnz[i] = onz;
1630     } else honnz = (HYPRE_Int *)onnz;
1631     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since hypre
1632        seems to assume the user will input entire rows of values, properly sorted.
1633        In PETSc, we don't make such an assumption and set this flag to 1,
1634        unless the option MAT_SORTED_FULL is set to true.
1635        Also, to avoid possible memory leaks, we destroy and recreate the translator.
1636        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1637        the IJ matrix for us */
1638     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1639     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1640     hypre_IJMatrixTranslator(hA->ij) = NULL;
1641     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, hA->ij, hdnnz, honnz);
1642     aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1643     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
1644   } else {
1645     honnz = NULL;
1646     PetscCallExternal(HYPRE_IJMatrixSetRowSizes, hA->ij, hdnnz);
1647   }
1648 
1649   /* reset assembled flag and call the initialize method */
1650   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1651 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1652   PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1653 #else
1654   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, hA->ij, HYPRE_MEMORY_HOST);
1655 #endif
1656   if (!dnnz) PetscCall(PetscFree(hdnnz));
1657   if (!onnz && honnz) PetscCall(PetscFree(honnz));
1658   /* Match AIJ logic */
1659   A->preallocated = PETSC_TRUE;
1660   A->assembled    = PETSC_FALSE;
1661   PetscFunctionReturn(PETSC_SUCCESS);
1662 }
1663 
1664 /*@C
1665   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1666 
1667   Collective
1668 
1669   Input Parameters:
1670 + A    - the matrix
1671 . dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1672           (same value is used for all local rows)
1673 . dnnz - array containing the number of nonzeros in the various rows of the
1674           DIAGONAL portion of the local submatrix (possibly different for each row)
1675           or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
1676           The size of this array is equal to the number of local rows, i.e `m`.
1677           For matrices that will be factored, you must leave room for (and set)
1678           the diagonal entry even if it is zero.
1679 . onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1680           submatrix (same value is used for all local rows).
1681 - onnz - array containing the number of nonzeros in the various rows of the
1682           OFF-DIAGONAL portion of the local submatrix (possibly different for
1683           each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
1684           structure. The size of this array is equal to the number
1685           of local rows, i.e `m`.
1686 
1687   Level: intermediate
1688 
1689   Note:
1690   If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, `onz` and `onnz` are ignored.
1691 
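  Example Usage:
  A minimal sketch (`comm`, `m`, and `n` are placeholders; error checking omitted) preallocating 5 diagonal and 2 off-diagonal nonzeros per local row:
.vb
  Mat A;
  MatCreate(comm, &A);
  MatSetSizes(A, m, n, PETSC_DECIDE, PETSC_DECIDE);
  MatSetType(A, MATHYPRE);
  MatHYPRESetPreallocation(A, 5, NULL, 2, NULL);
.ve
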
1692 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
1693 @*/
1694 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1695 {
1696   PetscFunctionBegin;
1697   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1698   PetscValidType(A, 1);
1699   PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
1700   PetscFunctionReturn(PETSC_SUCCESS);
1701 }
1702 
1703 /*@C
1704   MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`
1705 
1706   Collective
1707 
1708   Input Parameters:
1709 + parcsr   - the pointer to the `hypre_ParCSRMatrix`
1710 . mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
1711 - copymode - PETSc copying options, see  `PetscCopyMode`
1712 
1713   Output Parameter:
1714 . A - the matrix
1715 
1716   Level: intermediate
1717 
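  Example Usage:
  A minimal sketch, assuming `parcsr` was obtained from hypre elsewhere (error checking omitted); with `PETSC_USE_POINTER` the hypre data is referenced, not copied, and is not freed when the `Mat` is destroyed:
.vb
  Mat A;
  MatCreateFromParCSR(parcsr, MATHYPRE, PETSC_USE_POINTER, &A);
.ve
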
1718 .seealso: [](ch_matrices), `Mat`, `MatHYPRE`, `PetscCopyMode`
1719 @*/
1720 PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
1721 {
1722   Mat        T;
1723   Mat_HYPRE *hA;
1724   MPI_Comm   comm;
1725   PetscInt   rstart, rend, cstart, cend, M, N;
1726   PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;
1727 
1728   PetscFunctionBegin;
1729   comm = hypre_ParCSRMatrixComm(parcsr);
1730   PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
1731   PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
1732   PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
1733   PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
1734   PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
1735   PetscCall(PetscStrcmp(mtype, MATIS, &isis));
1736   isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
1737   /* TODO */
1738   PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
1739   /* access ParCSRMatrix */
1740   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1741   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
1742   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1743   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
1744   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1745   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);
1746 
1747   /* fix for empty local rows/columns */
1748   if (rend < rstart) rend = rstart;
1749   if (cend < cstart) cend = cstart;
1750 
1751   /* PETSc convention */
1752   rend++;
1753   cend++;
1754   rend = PetscMin(rend, M);
1755   cend = PetscMin(cend, N);
1756 
1757   /* create PETSc matrix with MatHYPRE */
1758   PetscCall(MatCreate(comm, &T));
1759   PetscCall(MatSetSizes(T, rend - rstart, cend - cstart, M, N));
1760   PetscCall(MatSetType(T, MATHYPRE));
1761   hA = (Mat_HYPRE *)T->data;
1762 
1763   /* create HYPRE_IJMatrix */
1764   PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
1765   PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1766 
1767   /* create new ParCSR object if needed */
1768   if (ishyp && copymode == PETSC_COPY_VALUES) {
1769     hypre_ParCSRMatrix *new_parcsr;
1770 #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
1771     hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;
1772 
1773     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
1774     hdiag      = hypre_ParCSRMatrixDiag(parcsr);
1775     hoffd      = hypre_ParCSRMatrixOffd(parcsr);
1776     ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
1777     noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
1778     PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
1779     PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
1780 #else
1781     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
1782 #endif
1783     parcsr   = new_parcsr;
1784     copymode = PETSC_OWN_POINTER;
1785   }
1786 
1787   /* set ParCSR object */
1788   hypre_IJMatrixObject(hA->ij) = parcsr;
1789   T->preallocated              = PETSC_TRUE;
1790 
1791   /* set assembled flag */
1792   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1793 #if 0
1794   PetscCallExternal(HYPRE_IJMatrixInitialize,hA->ij);
1795 #endif
1796   if (ishyp) {
1797     PetscMPIInt myid = 0;
1798 
1799     /* make sure we always have row_starts and col_starts available */
1800     if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
1801 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
1802     if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1803       PetscLayout map;
1804 
1805       PetscCall(MatGetLayouts(T, NULL, &map));
1806       PetscCall(PetscLayoutSetUp(map));
1807       hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1808     }
1809     if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1810       PetscLayout map;
1811 
1812       PetscCall(MatGetLayouts(T, &map, NULL));
1813       PetscCall(PetscLayoutSetUp(map));
1814       hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1815     }
1816 #endif
1817     /* prevent the ParCSR data from being freed when the Mat is destroyed */
1818     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1819     *A = T;
1820     PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
1821     PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
1822     PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
1823   } else if (isaij) {
1824     if (copymode != PETSC_OWN_POINTER) {
1825       /* prevent the ParCSR data from being freed when T is destroyed */
1826       hA->inner_free = PETSC_FALSE;
1827       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
1828       PetscCall(MatDestroy(&T));
1829     } else { /* AIJ return type with PETSC_OWN_POINTER */
1830       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
1831       *A = T;
1832     }
1833   } else if (isis) {
1834     PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
1835     if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1836     PetscCall(MatDestroy(&T));
1837   }
1838   PetscFunctionReturn(PETSC_SUCCESS);
1839 }
1840 
1841 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1842 {
1843   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1844   HYPRE_Int  type;
1845 
1846   PetscFunctionBegin;
1847   PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
1848   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1849   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1850   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)parcsr);
1851   PetscFunctionReturn(PETSC_SUCCESS);
1852 }
1853 
1854 /*@C
1855   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1856 
1857   Not Collective, No Fortran Support
1858 
1859   Input Parameter:
1860 . A - the `MATHYPRE` object
1861 
1862   Output Parameter:
1863 . parcsr - the pointer to the `hypre_ParCSRMatrix`
1864 
1865   Level: intermediate
1866 
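  Example Usage:
  A minimal sketch, assuming `A` is a `MATHYPRE` matrix (error checking omitted); the matrix keeps ownership of the returned object:
.vb
  hypre_ParCSRMatrix *parcsr;
  MatHYPREGetParCSR(A, &parcsr);
.ve
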
1867 .seealso: [](ch_matrices), `Mat`, `MatHYPRE`, `PetscCopyMode`
1868 @*/
1869 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1870 {
1871   PetscFunctionBegin;
1872   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1873   PetscValidType(A, 1);
1874   PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
1875   PetscFunctionReturn(PETSC_SUCCESS);
1876 }
1877 
1878 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1879 {
1880   hypre_ParCSRMatrix *parcsr;
1881   hypre_CSRMatrix    *ha;
1882   PetscInt            rst;
1883 
1884   PetscFunctionBegin;
1885   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented with non-square diagonal blocks");
1886   PetscCall(MatGetOwnershipRange(A, &rst, NULL));
1887   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1888   if (missing) *missing = PETSC_FALSE;
1889   if (dd) *dd = -1;
1890   ha = hypre_ParCSRMatrixDiag(parcsr);
1891   if (ha) {
1892     PetscInt   size, i;
1893     HYPRE_Int *ii, *jj;
1894 
1895     size = hypre_CSRMatrixNumRows(ha);
1896     ii   = hypre_CSRMatrixI(ha);
1897     jj   = hypre_CSRMatrixJ(ha);
1898     for (i = 0; i < size; i++) {
1899       PetscInt  j;
1900       PetscBool found = PETSC_FALSE;
1901 
1902       for (j = ii[i]; j < ii[i + 1] && !found; j++) found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1903 
1904       if (!found) {
1905         PetscCall(PetscInfo(A, "Matrix is missing local diagonal entry %" PetscInt_FMT "\n", i));
1906         if (missing) *missing = PETSC_TRUE;
1907         if (dd) *dd = i + rst;
1908         PetscFunctionReturn(PETSC_SUCCESS);
1909       }
1910     }
1911     if (!size) {
1912       PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1913       if (missing) *missing = PETSC_TRUE;
1914       if (dd) *dd = rst;
1915     }
1916   } else {
1917     PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1918     if (missing) *missing = PETSC_TRUE;
1919     if (dd) *dd = rst;
1920   }
1921   PetscFunctionReturn(PETSC_SUCCESS);
1922 }
1923 
1924 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1925 {
1926   hypre_ParCSRMatrix *parcsr;
1927 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1928   hypre_CSRMatrix *ha;
1929 #endif
1930   HYPRE_Complex hs;
1931 
1932   PetscFunctionBegin;
1933   PetscCall(PetscHYPREScalarCast(s, &hs));
1934   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1935 #if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
1936   PetscCallExternal(hypre_ParCSRMatrixScale, parcsr, hs);
1937 #else /* diagonal part */
1938   ha = hypre_ParCSRMatrixDiag(parcsr);
1939   if (ha) {
1940     PetscInt       size, i;
1941     HYPRE_Int     *ii;
1942     HYPRE_Complex *a;
1943 
1944     size = hypre_CSRMatrixNumRows(ha);
1945     a    = hypre_CSRMatrixData(ha);
1946     ii   = hypre_CSRMatrixI(ha);
1947     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1948   }
1949   /* off-diagonal part */
1950   ha = hypre_ParCSRMatrixOffd(parcsr);
1951   if (ha) {
1952     PetscInt       size, i;
1953     HYPRE_Int     *ii;
1954     HYPRE_Complex *a;
1955 
1956     size = hypre_CSRMatrixNumRows(ha);
1957     a    = hypre_CSRMatrixData(ha);
1958     ii   = hypre_CSRMatrixI(ha);
1959     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1960   }
1961 #endif
1962   PetscFunctionReturn(PETSC_SUCCESS);
1963 }
1964 
1965 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1966 {
1967   hypre_ParCSRMatrix *parcsr;
1968   HYPRE_Int          *lrows;
1969   PetscInt            rst, ren, i;
1970 
1971   PetscFunctionBegin;
1972   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1973   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1974   PetscCall(PetscMalloc1(numRows, &lrows));
1975   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1976   for (i = 0; i < numRows; i++) {
1977     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1978     lrows[i] = rows[i] - rst;
1979   }
1980   PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows);
1981   PetscCall(PetscFree(lrows));
1982   PetscFunctionReturn(PETSC_SUCCESS);
1983 }
1984 
1985 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1986 {
1987   PetscFunctionBegin;
1988   if (ha) {
1989     HYPRE_Int     *ii, size;
1990     HYPRE_Complex *a;
1991 
1992     size = hypre_CSRMatrixNumRows(ha);
1993     a    = hypre_CSRMatrixData(ha);
1994     ii   = hypre_CSRMatrixI(ha);
1995 
1996     if (a) PetscCall(PetscArrayzero(a, ii[size]));
1997   }
1998   PetscFunctionReturn(PETSC_SUCCESS);
1999 }
2000 
2001 static PetscErrorCode MatZeroEntries_HYPRE(Mat A)
2002 {
2003   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2004 
2005   PetscFunctionBegin;
2006   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
2007     PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0);
2008   } else {
2009     hypre_ParCSRMatrix *parcsr;
2010 
2011     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2012     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
2013     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
2014   }
2015   PetscFunctionReturn(PETSC_SUCCESS);
2016 }
2017 
2018 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
2019 {
2020   PetscInt       ii;
2021   HYPRE_Int     *i, *j;
2022   HYPRE_Complex *a;
2023 
2024   PetscFunctionBegin;
2025   if (!hA) PetscFunctionReturn(PETSC_SUCCESS);
2026 
2027   i = hypre_CSRMatrixI(hA);
2028   j = hypre_CSRMatrixJ(hA);
2029   a = hypre_CSRMatrixData(hA);
2030 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2031   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hA)) {
2032   #if defined(HYPRE_USING_CUDA)
2033     MatZeroRows_CUDA(N, rows, i, j, a, diag);
2034   #elif defined(HYPRE_USING_HIP)
2035     MatZeroRows_HIP(N, rows, i, j, a, diag);
2036   #elif defined(PETSC_HAVE_KOKKOS)
2037     MatZeroRows_Kokkos(N, rows, i, j, a, diag);
2038   #else
2039     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for MatZeroRows on a hypre matrix in this memory location");
2040   #endif
2041   } else
2042 #endif
2043   {
2044     for (ii = 0; ii < N; ii++) {
2045       HYPRE_Int jj, ibeg, iend, irow;
2046 
2047       irow = rows[ii];
2048       ibeg = i[irow];
2049       iend = i[irow + 1];
2050       for (jj = ibeg; jj < iend; jj++)
2051         if (j[jj] == irow) a[jj] = diag;
2052         else a[jj] = 0.0;
2053     }
2054   }
2055   PetscFunctionReturn(PETSC_SUCCESS);
2056 }
2057 
2058 static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2059 {
2060   hypre_ParCSRMatrix *parcsr;
2061   PetscInt           *lrows, len, *lrows2;
2062   HYPRE_Complex       hdiag;
2063 
2064   PetscFunctionBegin;
2065   PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Modifying the solution and the right-hand side is not supported");
2066   PetscCall(PetscHYPREScalarCast(diag, &hdiag));
2067   /* retrieve the internal matrix */
2068   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2069   /* get locally owned rows */
2070   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));
2071 
2072 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2073   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hypre_ParCSRMatrixDiag(parcsr))) {
2074     Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2075     PetscInt   m;
2076     PetscCall(MatGetLocalSize(A, &m, NULL));
2077     if (!hA->rows_d) {
2078       hA->rows_d = hypre_TAlloc(PetscInt, m, HYPRE_MEMORY_DEVICE);
2079       if (m) PetscCheck(hA->rows_d, PETSC_COMM_SELF, PETSC_ERR_MEM, "HYPRE_TAlloc failed");
2080     }
2081     PetscCheck(len <= m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many rows in rows[]");
2082     PetscStackCallExternalVoid("hypre_Memcpy", hypre_Memcpy(hA->rows_d, lrows, sizeof(PetscInt) * len, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST));
2083     lrows2 = hA->rows_d;
2084   } else
2085 #endif
2086   {
2087     lrows2 = lrows;
2088   }
2089 
2090   /* zero diagonal part */
2091   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows2, hdiag));
2092   /* zero off-diagonal part */
2093   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows2, 0.0));
2094 
2095   PetscCall(PetscFree(lrows));
2096   PetscFunctionReturn(PETSC_SUCCESS);
2097 }
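/* A minimal usage sketch (made-up rows; error checking omitted): zero out rows of an
   assembled MATHYPRE matrix, placing 1.0 on the diagonal; the x and b arguments of
   MatZeroRows() must be NULL since modifying them is not supported here.

     PetscInt rows[2] = {0, 3};
     MatZeroRows(A, 2, rows, 1.0, NULL, NULL);
*/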
2098 
2099 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
2100 {
2101   PetscFunctionBegin;
2102   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
2103 
2104   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2105   PetscFunctionReturn(PETSC_SUCCESS);
2106 }
2107 
2108 static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2109 {
2110   hypre_ParCSRMatrix *parcsr;
2111   HYPRE_Int           hnz;
2112 
2113   PetscFunctionBegin;
2114   /* retrieve the internal matrix */
2115   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2116   /* call HYPRE API */
2117   PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2118   if (nz) *nz = (PetscInt)hnz;
2119   PetscFunctionReturn(PETSC_SUCCESS);
2120 }
2121 
2122 static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2123 {
2124   hypre_ParCSRMatrix *parcsr;
2125   HYPRE_Int           hnz;
2126 
2127   PetscFunctionBegin;
2128   /* retrieve the internal matrix */
2129   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2130   /* call HYPRE API */
2131   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2132   PetscCallExternal(HYPRE_ParCSRMatrixRestoreRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2133   PetscFunctionReturn(PETSC_SUCCESS);
2134 }
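/* A minimal usage sketch (error checking omitted): as with other matrix types,
   MatGetRow()/MatRestoreRow() must be paired and used one row at a time.

     PetscInt           ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     MatGetRow(A, row, &ncols, &cols, &vals);
     ... inspect cols[0..ncols-1] and vals[0..ncols-1] ...
     MatRestoreRow(A, row, &ncols, &cols, &vals);
*/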
2135 
2136 static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2137 {
2138   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2139   PetscInt   i;
2140 
2141   PetscFunctionBegin;
2142   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2143   /* Ignore negative row indices.
2144    * Negative column indices should be automatically ignored by hypre.
2145    */
2146   for (i = 0; i < m; i++) {
2147     if (idxm[i] >= 0) {
2148       HYPRE_Int hn = (HYPRE_Int)n;
2149       PetscCallExternal(HYPRE_IJMatrixGetValues, hA->ij, 1, &hn, (HYPRE_BigInt *)&idxm[i], (HYPRE_BigInt *)idxn, (HYPRE_Complex *)(v + i * n));
2150     }
2151   }
2152   PetscFunctionReturn(PETSC_SUCCESS);
2153 }
2154 
2155 static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2156 {
2157   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2158 
2159   PetscFunctionBegin;
2160   switch (op) {
2161   case MAT_NO_OFF_PROC_ENTRIES:
2162     if (flg) PetscCallExternal(HYPRE_IJMatrixSetMaxOffProcElmts, hA->ij, 0);
2163     break;
2164   case MAT_IGNORE_OFF_PROC_ENTRIES:
2165     hA->donotstash = flg;
2166     break;
2167   default:
2168     break;
2169   }
2170   PetscFunctionReturn(PETSC_SUCCESS);
2171 }
2172 
2173 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
2174 {
2175   PetscViewerFormat format;
2176 
2177   PetscFunctionBegin;
2178   PetscCall(PetscViewerGetFormat(view, &format));
2179   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
2180   if (format != PETSC_VIEWER_NATIVE) {
2181     Mat                 B;
2182     hypre_ParCSRMatrix *parcsr;
2183     PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;
2184 
2185     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2186     PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
2187     PetscCall(MatGetOperation(B, MATOP_VIEW, (void (**)(void))&mview));
2188     PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
2189     PetscCall((*mview)(B, view));
2190     PetscCall(MatDestroy(&B));
2191   } else {
2192     Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
2193     PetscMPIInt size;
2194     PetscBool   isascii;
2195     const char *filename;
2196 
2197     /* HYPRE uses only text files */
2198     PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
2199     PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
2200     PetscCall(PetscViewerFileGetName(view, &filename));
2201     PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename);
2202     PetscCallMPI(MPI_Comm_size(hA->comm, &size));
2203     if (size > 1) {
2204       PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
2205     } else {
2206       PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
2207     }
2208   }
2209   PetscFunctionReturn(PETSC_SUCCESS);
2210 }
2211 
2212 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2213 {
2214   hypre_ParCSRMatrix *acsr, *bcsr;
2215 
2216   PetscFunctionBegin;
2217   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2218     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2219     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2220     PetscCallExternal(hypre_ParCSRMatrixCopy, acsr, bcsr, 1);
2221     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2222     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2223     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2224   } else {
2225     PetscCall(MatCopy_Basic(A, B, str));
2226   }
2227   PetscFunctionReturn(PETSC_SUCCESS);
2228 }
2229 
2230 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
2231 {
2232   hypre_ParCSRMatrix *parcsr;
2233   hypre_CSRMatrix    *dmat;
2234   HYPRE_Complex      *a;
2235   PetscBool           cong;
2236 
2237   PetscFunctionBegin;
2238   PetscCall(MatHasCongruentLayouts(A, &cong));
2239   PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
2240   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2241   dmat = hypre_ParCSRMatrixDiag(parcsr);
2242   if (dmat) {
2243 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2244     HYPRE_MemoryLocation mem = hypre_CSRMatrixMemoryLocation(dmat);
2245 #else
2246     HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
2247 #endif
2248 
2249     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecGetArrayWriteAndMemType(d, (PetscScalar **)&a, NULL));
2250     else PetscCall(VecGetArrayWrite(d, (PetscScalar **)&a));
2251     hypre_CSRMatrixExtractDiagonal(dmat, a, 0);
2252     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecRestoreArrayWriteAndMemType(d, (PetscScalar **)&a));
2253     else PetscCall(VecRestoreArrayWrite(d, (PetscScalar **)&a));
2254   }
2255   PetscFunctionReturn(PETSC_SUCCESS);
2256 }
2257 
2258 #include <petscblaslapack.h>
2259 
2260 static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2261 {
2262   PetscFunctionBegin;
2263 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2264   {
2265     Mat                 B;
2266     hypre_ParCSRMatrix *x, *y, *z;
2267 
2268     PetscCall(MatHYPREGetParCSR(Y, &y));
2269     PetscCall(MatHYPREGetParCSR(X, &x));
2270     PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, 1.0, x, &z);
2271     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2272     PetscCall(MatHeaderMerge(Y, &B));
2273   }
2274 #else
2275   if (str == SAME_NONZERO_PATTERN) {
2276     hypre_ParCSRMatrix *x, *y;
2277     hypre_CSRMatrix    *xloc, *yloc;
2278     PetscInt            xnnz, ynnz;
2279     HYPRE_Complex      *xarr, *yarr;
2280     PetscBLASInt        one = 1, bnz;
2281 
2282     PetscCall(MatHYPREGetParCSR(Y, &y));
2283     PetscCall(MatHYPREGetParCSR(X, &x));
2284 
2285     /* diagonal block */
2286     xloc = hypre_ParCSRMatrixDiag(x);
2287     yloc = hypre_ParCSRMatrixDiag(y);
2288     xnnz = 0;
2289     ynnz = 0;
2290     xarr = NULL;
2291     yarr = NULL;
2292     if (xloc) {
2293       xarr = hypre_CSRMatrixData(xloc);
2294       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2295     }
2296     if (yloc) {
2297       yarr = hypre_CSRMatrixData(yloc);
2298       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2299     }
2300     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2301     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2302     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2303 
2304     /* off-diagonal block */
2305     xloc = hypre_ParCSRMatrixOffd(x);
2306     yloc = hypre_ParCSRMatrixOffd(y);
2307     xnnz = 0;
2308     ynnz = 0;
2309     xarr = NULL;
2310     yarr = NULL;
2311     if (xloc) {
2312       xarr = hypre_CSRMatrixData(xloc);
2313       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2314     }
2315     if (yloc) {
2316       yarr = hypre_CSRMatrixData(yloc);
2317       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2318     }
2319     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2320     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2321     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2322   } else if (str == SUBSET_NONZERO_PATTERN) {
2323     PetscCall(MatAXPY_Basic(Y, a, X, str));
2324   } else {
2325     Mat B;
2326 
2327     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2328     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2329     PetscCall(MatHeaderReplace(Y, &B));
2330   }
2331 #endif
2332   PetscFunctionReturn(PETSC_SUCCESS);
2333 }
2334 
2335 static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
2336 {
2337   hypre_ParCSRMatrix *parcsr = NULL;
2338   PetscCopyMode       cpmode;
2339   Mat_HYPRE          *hA;
2340 
2341   PetscFunctionBegin;
2342   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2343   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2344     parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
2345     cpmode = PETSC_OWN_POINTER;
2346   } else {
2347     cpmode = PETSC_COPY_VALUES;
2348   }
2349   PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
2350   hA = (Mat_HYPRE *)A->data;
2351   if (hA->cooMat) {
2352     Mat_HYPRE *hB = (Mat_HYPRE *)((*B)->data);
2353     op            = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
2354     /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
2355     PetscCall(MatDuplicate(hA->cooMat, op, &hB->cooMat));
2356     PetscCall(MatHYPRE_AttachCOOMat(*B));
2357   }
2358   PetscFunctionReturn(PETSC_SUCCESS);
2359 }
2360 
2361 static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
2362 {
2363   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2364 
2365   PetscFunctionBegin;
2366   /* Build an agent matrix cooMat in AIJ format.
2367      It has the same sparsity pattern as mat and shares mat's data array; we use cooMat to do the COO work.
2368    */
2369   PetscCall(MatHYPRE_CreateCOOMat(mat));
2370   PetscCall(MatSetOption(hmat->cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
2371   PetscCall(MatSetOption(hmat->cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));
2372 
2373   /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ use this specific
2374      name to automatically put the diagonal entries first */
2375   PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
2376   PetscCall(MatSetPreallocationCOO(hmat->cooMat, coo_n, coo_i, coo_j));
2377   hmat->cooMat->assembled = PETSC_TRUE;
2378 
2379   /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
2380   PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
2381   PetscCall(MatHYPRE_CreateFromMat(hmat->cooMat, hmat));      /* Create hmat->ij and preallocate it */
2382   PetscCall(MatHYPRE_IJMatrixCopyIJ(hmat->cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */
2383 
2384   mat->preallocated = PETSC_TRUE;
2385   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
2386   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */
2387 
2388   /* Attach cooMat to mat */
2389   PetscCall(MatHYPRE_AttachCOOMat(mat));
2390   PetscFunctionReturn(PETSC_SUCCESS);
2391 }
2392 
2393 static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
2394 {
2395   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2396 
2397   PetscFunctionBegin;
2398   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
2399   PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
2400   PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
2401   PetscFunctionReturn(PETSC_SUCCESS);
2402 }
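/* A minimal COO assembly sketch (made-up coordinates; error checking omitted):

     PetscInt    coo_i[3] = {0, 0, 1};
     PetscInt    coo_j[3] = {0, 1, 1};
     PetscScalar v[3]     = {2.0, -1.0, 2.0};
     MatSetPreallocationCOO(A, 3, coo_i, coo_j);
     MatSetValuesCOO(A, v, INSERT_VALUES);

   Subsequent calls to MatSetValuesCOO() with new values reuse the preallocated pattern. */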
2403 
2404 /*MC
2405    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2406           based on the hypre IJ interface.
2407 
2408    Level: intermediate
2409 
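   Example Usage:
   A minimal creation sketch (`M` and `N` are placeholder global sizes; error checking omitted); the type can also be selected at runtime with -mat_type hypre:
.vb
  Mat A;
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, N);
  MatSetType(A, MATHYPRE);
  MatSetUp(A);
.ve
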
2410 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation`
2411 M*/
2412 
2413 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
2414 {
2415   Mat_HYPRE *hB;
2416 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2417   HYPRE_MemoryLocation memory_location;
2418 #endif
2419 
2420   PetscFunctionBegin;
2421   PetscHYPREInitialize();
2422   PetscCall(PetscNew(&hB));
2423 
2424   hB->inner_free      = PETSC_TRUE;
2425   hB->array_available = PETSC_TRUE;
2426 
2427   B->data = (void *)hB;
2428 
2429   PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
2430   B->ops->mult                  = MatMult_HYPRE;
2431   B->ops->multtranspose         = MatMultTranspose_HYPRE;
2432   B->ops->multadd               = MatMultAdd_HYPRE;
2433   B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
2434   B->ops->setup                 = MatSetUp_HYPRE;
2435   B->ops->destroy               = MatDestroy_HYPRE;
2436   B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
2437   B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
2438   B->ops->setvalues             = MatSetValues_HYPRE;
2439   B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
2440   B->ops->scale                 = MatScale_HYPRE;
2441   B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
2442   B->ops->zeroentries           = MatZeroEntries_HYPRE;
2443   B->ops->zerorows              = MatZeroRows_HYPRE;
2444   B->ops->getrow                = MatGetRow_HYPRE;
2445   B->ops->restorerow            = MatRestoreRow_HYPRE;
2446   B->ops->getvalues             = MatGetValues_HYPRE;
2447   B->ops->setoption             = MatSetOption_HYPRE;
2448   B->ops->duplicate             = MatDuplicate_HYPRE;
2449   B->ops->copy                  = MatCopy_HYPRE;
2450   B->ops->view                  = MatView_HYPRE;
2451   B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
2452   B->ops->axpy                  = MatAXPY_HYPRE;
2453   B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
2454 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2455   B->ops->bindtocpu = MatBindToCPU_HYPRE;
2456   /* Get hypre's default memory location. Users can control this using the corresponding HYPRE_SetMemoryLocation API */
2457   PetscCallExternal(HYPRE_GetMemoryLocation, &memory_location);
2458   B->boundtocpu = (memory_location == HYPRE_MEMORY_HOST) ? PETSC_TRUE : PETSC_FALSE;
2459 #endif
2460 
2461   /* build stash cache for off-process entries set during assembly */
2462   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));
2463 
2464   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
2465   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
2466   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
2467   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
2468   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
2469   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
2470   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
2471   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
2472   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
2473   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
2474 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2475   #if defined(HYPRE_USING_HIP)
2476   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2477   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2478   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
2479   PetscCall(MatSetVecType(B, VECHIP));
2480   #endif
2481   #if defined(HYPRE_USING_CUDA)
2482   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2483   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2484   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
2485   PetscCall(MatSetVecType(B, VECCUDA));
2486   #endif
2487 #endif
2488   PetscFunctionReturn(PETSC_SUCCESS);
2489 }
2490