xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 98d129c30f3ee9fdddc40fdbc5a989b7be64f888)
1 /*
2     Creates hypre ijmatrix from PETSc matrix
3 */
4 
5 #include <petscpkg_version.h>
6 #include <petsc/private/petschypre.h>
7 #include <petscmathypre.h>
8 #include <petsc/private/matimpl.h>
9 #include <petsc/private/deviceimpl.h>
10 #include <../src/mat/impls/hypre/mhypre.h>
11 #include <../src/mat/impls/aij/mpi/mpiaij.h>
12 #include <../src/vec/vec/impls/hypre/vhyp.h>
13 #include <HYPRE.h>
14 #include <HYPRE_utilities.h>
15 #include <_hypre_parcsr_ls.h>
16 #include <_hypre_sstruct_ls.h>
17 
18 #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
19   #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
20 #endif
21 
22 static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
23 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
24 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
25 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
27 static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);
28 
29 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
30 {
31   PetscInt        i, n_d, n_o;
32   const PetscInt *ia_d, *ia_o;
33   PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
34   HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;
35 
36   PetscFunctionBegin;
37   if (A_d) { /* determine number of nonzero entries in local diagonal part */
38     PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
39     if (done_d) {
40       PetscCall(PetscMalloc1(n_d, &nnz_d));
41       for (i = 0; i < n_d; i++) nnz_d[i] = ia_d[i + 1] - ia_d[i];
42     }
43     PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
44   }
45   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
46     PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
47     if (done_o) {
48       PetscCall(PetscMalloc1(n_o, &nnz_o));
49       for (i = 0; i < n_o; i++) nnz_o[i] = ia_o[i + 1] - ia_o[i];
50     }
51     PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
52   }
53   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
54     if (!done_o) { /* only diagonal part */
55       PetscCall(PetscCalloc1(n_d, &nnz_o));
56     }
57 #if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
58     { /* If we don't do this, the columns of the matrix will be all zeros! */
59       hypre_AuxParCSRMatrix *aux_matrix;
60       aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
61       hypre_AuxParCSRMatrixDestroy(aux_matrix);
62       hypre_IJMatrixTranslator(ij) = NULL;
63       PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
64       /* it seems they partially fixed it in 2.19.0 */
65   #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
66       aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
67       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
68   #endif
69     }
70 #else
71     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
72 #endif
73     PetscCall(PetscFree(nnz_d));
74     PetscCall(PetscFree(nnz_o));
75   }
76   PetscFunctionReturn(PETSC_SUCCESS);
77 }
78 
/* (Re)creates the HYPRE_IJMatrix hA->ij with the same parallel layout as A and
   preallocates it from A's sparsity pattern. (MPI/Seq)(B)AIJ inputs are handled
   explicitly; any other type gets an IJ matrix without preallocation info. */
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscInt rstart, rend, cstart, cend;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscHYPREInitialize();
  if (hA->ij) {
    /* if we do not own the inner hypre object, detach it so HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
  }
  /* hypre takes inclusive upper bounds, hence the -1 */
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
  {
    PetscBool       same;
    Mat             A_d, A_o;
    const PetscInt *colmap;
    /* dispatch on the base type of A to obtain the diagonal/off-diagonal blocks used for preallocation */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
    if (same) {
      PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
    if (same) {
      PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
    if (same) {
      /* sequential: the whole matrix acts as the "diagonal" block */
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
126 
127 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
128 {
129   PetscBool flg;
130 
131   PetscFunctionBegin;
132 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
133   PetscCallExternal(HYPRE_IJMatrixInitialize, ij);
134 #else
135   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, ij, HYPRE_MEMORY_HOST);
136 #endif
137   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
138   if (flg) {
139     PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
140     PetscFunctionReturn(PETSC_SUCCESS);
141   }
142   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
143   if (flg) {
144     PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
145     PetscFunctionReturn(PETSC_SUCCESS);
146   }
147   PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
148   PetscFunctionReturn(PETSC_SUCCESS);
149 }
150 
/* Copies the CSR row pointers and column indices of a SeqAIJ matrix directly
   into the diagonal block of the hypre IJ matrix. Assumes the IJ matrix was
   preallocated with a matching pattern (see MatHYPRE_IJMatrixPreallocate). */
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
  HYPRE_Int              type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* true -> index arrays can be copied wholesale */

  PetscFunctionBegin;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    /* same integer width: bulk-copy row pointers (n+1 entries) and column indices (nz entries) */
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
    PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
  } else {
    PetscInt i;

    /* widths differ: convert element by element */
    for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  /* the structure was written directly; tell hypre it does not need its auxiliary (stashing) matrix */
  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
182 
/* Copies the CSR index structure of an MPIAIJ matrix into the diagonal and
   off-diagonal blocks of the hypre IJ matrix. Off-diagonal (compressed) column
   indices are mapped back to global numbering through garray; with hypre >= 2.16
   they are written into the big_j array rather than j. */
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* true -> row pointers can be bulk-copied */

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /* row pointers of the diagonal block */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  /* hypre >= 2.16 expects local (block-relative) column indices in the diagonal part */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = pjj[i];
#else
  /* older hypre expects global column indices: shift by the first owned column */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  /* row pointers of the off-diagonal block (same local row count as the diagonal block) */
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)poffd->i[i];
  }

  /* NOTE(review): the casts below assume PetscInt matches the size of the hypre
     index type being written through (j or big_j) — TODO confirm for mixed
     32/64-bit integer builds */
  jj = (PetscInt *)hoffd->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  PetscCallExternal(hypre_CSRMatrixBigInitialize, hoffd);
  jj = (PetscInt *)hoffd->big_j;
#endif
  pjj = poffd->j;
  /* translate compressed off-diagonal indices back to global numbering */
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];

  /* the structure was written directly; tell hypre it does not need its auxiliary (stashing) matrix */
  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
237 
/* Converts a MATHYPRE matrix to MATIS: builds row/column local-to-global maps
   from the hypre partitioning, then merges hypre's diagonal and off-diagonal
   CSR blocks into a single local SeqAIJ matrix whose columns are numbered
   [0,dc) for the diagonal part and [dc,dc+oc) for the off-diagonal part. */
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)A->data;
  Mat                    lA;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;
  MatType                lmattype   = NULL;
  PetscBool              freeparcsr = PETSC_FALSE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, mhA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, mhA->ij, (void **)&hA);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(mhA->ij)) {
    /* Support by copying back on the host and copy to GPU
       Kind of inefficient, but this is the best we can do now */
  #if defined(HYPRE_USING_HIP)
    lmattype = MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    lmattype = MATSEQAIJCUSPARSE;
  #endif
    hA         = hypre_ParCSRMatrixClone_v2(hA, 1, HYPRE_MEMORY_HOST);
    freeparcsr = PETSC_TRUE;
  }
#endif
  /* gather sizes, index arrays and value arrays of both hypre blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total nonzeros of the merged local matrix */
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscCall(PetscMalloc1(dc + oc, &aux));
    /* columns [0,dc) are the owned block, [dc,dc+oc) the ghost columns */
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: write into the existing local matrix's CSR arrays after validating sizes */
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArrayWrite(lA, &data));
  }
  /* merge local matrices: walk both blocks row by row, concatenating their
     entries and sorting each merged row by column */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);   /* merged row pointer = sum of the two blocks' row pointers */
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    PetscScalar *aold = (PetscScalar *)aa;
    PetscInt    *jold = jj, nc = jd + jo;
    for (; jd < *hdi; jd++) { /* diagonal-block entries keep their local column index */
      *jj++ = *hdj++;
      *aa++ = *hdd++;
    }
    for (; jo < *hoi; jo++) { /* off-diagonal entries are shifted past the dc owned columns */
      *jj++ = *hoj++ + dc;
      *aa++ = *hod++;
    }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
  }
  for (; cum < dr; cum++) *(++ii) = nnz; /* remaining rows are empty: close their row pointers */
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ *a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
    /* hack SeqAIJ: transfer ownership of the CSR arrays we just allocated */
    a          = (Mat_SeqAIJ *)lA->data;
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    if (lmattype) PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
    PetscCall(MatISSetLocalMat(*B, lA));
    PetscCall(MatDestroy(&lA));
  } else {
    PetscCall(MatSeqAIJRestoreArrayWrite(lA, &data));
  }
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
  if (freeparcsr) PetscCallExternal(hypre_ParCSRMatrixDestroy, hA); /* free the host clone made above */
  PetscFunctionReturn(PETSC_SUCCESS);
}
367 
368 static PetscErrorCode MatHYPRE_DestroyCOOMat(Mat mat)
369 {
370   Mat_HYPRE *hA = (Mat_HYPRE *)mat->data;
371 
372   PetscFunctionBegin;
373   if (hA->cooMat) { /* If cooMat is present we need to destroy the column indices */
374     PetscCall(MatDestroy(&hA->cooMat));
375     if (hA->cooMatAttached) {
376       hypre_CSRMatrix     *csr;
377       hypre_ParCSRMatrix  *parcsr;
378       HYPRE_MemoryLocation mem;
379 
380       PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
381       csr = hypre_ParCSRMatrixDiag(parcsr);
382       if (csr) {
383         mem = hypre_CSRMatrixMemoryLocation(csr);
384         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
385         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
386       }
387       csr = hypre_ParCSRMatrixOffd(parcsr);
388       if (csr) {
389         mem = hypre_CSRMatrixMemoryLocation(csr);
390         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
391         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
392       }
393     }
394   }
395   hA->cooMatAttached = PETSC_FALSE;
396   PetscFunctionReturn(PETSC_SUCCESS);
397 }
398 
/* Creates the AIJ "agent" matrix hmat->cooMat used to run PETSc's COO assembly
   machinery on behalf of the hypre matrix; it gets mat's layouts and, when the
   hypre matrix lives on the GPU, a matching device AIJ type. */
static PetscErrorCode MatHYPRE_CreateCOOMat(Mat mat)
{
  MPI_Comm    comm;
  PetscMPIInt size;
  PetscLayout rmap, cmap;
  Mat_HYPRE  *hmat    = (Mat_HYPRE *)mat->data;
  MatType     matType = MATAIJ; /* default type of cooMat */

  PetscFunctionBegin;
  /* Build an agent matrix cooMat with AIJ format
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size)); /* NOTE(review): size is queried but not used below — possibly leftover */
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  PetscCall(MatGetLayouts(mat, &rmap, &cmap));

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
  #if defined(HYPRE_USING_HIP)
    matType = MATAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    matType = MATAIJCUSPARSE;
  #else
    SETERRQ(comm, PETSC_ERR_SUP, "Do not know the HYPRE device");
  #endif
  }
#endif

  /* Do COO preallocation through cooMat */
  PetscCall(MatHYPRE_DestroyCOOMat(mat)); /* drop any previous delegate first */
  PetscCall(MatCreate(comm, &hmat->cooMat));
  PetscCall(MatSetType(hmat->cooMat, matType));
  PetscCall(MatSetLayouts(hmat->cooMat, rmap, cmap));

  /* allocate local matrices if needed */
  PetscCall(MatMPIAIJSetPreallocation(hmat->cooMat, 0, NULL, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
439 
/* Attach cooMat data array to hypre matrix.
   When AIJCUPMSPARSE will support raw device pointers and not THRUSTARRAY
   we should swap the arrays: i.e., attach hypre matrix array to cooMat
   This is because hypre should be in charge of handling the memory,
   cooMat is only a way to reuse PETSc COO code.
   attaching the memory will then be done at MatSetValuesCOO time and it will dynamically
   support hypre matrix migrating to host.
*/
static PetscErrorCode MatHYPRE_AttachCOOMat(Mat mat)
{
  Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
  hypre_CSRMatrix     *diag, *offd;
  hypre_ParCSRMatrix  *parCSR;
  HYPRE_MemoryLocation hmem = HYPRE_MEMORY_HOST;
  PetscMemType         pmem;
  Mat                  A, B;
  PetscScalar         *a;
  PetscMPIInt          size;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  if (hmat->cooMatAttached) PetscFunctionReturn(PETSC_SUCCESS); /* idempotent: already attached */
  PetscCheck(hmat->cooMat->preallocated, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix is not preallocated");
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* Alias cooMat's data array to IJMatrix's */
  PetscCallExternal(HYPRE_IJMatrixGetObject, hmat->ij, (void **)&parCSR);
  diag = hypre_ParCSRMatrixDiag(parCSR);
  offd = hypre_ParCSRMatrixOffd(parCSR);

  /* sequential: cooMat itself is the diagonal block; parallel: use the MPIAIJ sub-blocks */
  A = (size == 1) ? hmat->cooMat : ((Mat_MPIAIJ *)hmat->cooMat->data)->A;
  B = (size == 1) ? NULL : ((Mat_MPIAIJ *)hmat->cooMat->data)->B;

  PetscCall(PetscObjectSetName((PetscObject)A, "_internal_COO_mat_for_hypre"));
  hmem = hypre_CSRMatrixMemoryLocation(diag);
  PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &a, &pmem));
  PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
  /* release hypre's own value array and point it at cooMat's values instead */
  PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hmem));
  hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)a;
  hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */

  if (B) {
    /* same aliasing for the off-diagonal block */
    hmem = hypre_CSRMatrixMemoryLocation(offd);
    PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &a, &pmem));
    PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hmem));
    hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)a;
    hypre_CSRMatrixOwnsData(offd) = 0;
  }
  hmat->cooMatAttached = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
495 
496 static PetscErrorCode CSRtoCOO_Private(PetscInt n, const PetscInt ii[], const PetscInt jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
497 {
498   PetscInt *cooi, *cooj;
499 
500   PetscFunctionBegin;
501   *ncoo = ii[n];
502   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
503   for (PetscInt i = 0; i < n; i++) {
504     for (PetscInt j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
505   }
506   PetscCall(PetscArraycpy(cooj, jj, *ncoo));
507   *coo_i = cooi;
508   *coo_j = cooj;
509   PetscFunctionReturn(PETSC_SUCCESS);
510 }
511 
512 static PetscErrorCode CSRtoCOO_HYPRE_Int_Private(PetscInt n, const HYPRE_Int ii[], const HYPRE_Int jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
513 {
514   PetscInt *cooi, *cooj;
515 
516   PetscFunctionBegin;
517   *ncoo = ii[n];
518   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
519   for (PetscInt i = 0; i < n; i++) {
520     for (HYPRE_Int j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
521   }
522   for (PetscCount i = 0; i < *ncoo; i++) cooj[i] = jj[i];
523   *coo_i = cooi;
524   *coo_j = cooj;
525   PetscFunctionReturn(PETSC_SUCCESS);
526 }
527 
528 static PetscErrorCode MatSeqAIJGetCOO_Private(Mat A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
529 {
530   PetscInt        n;
531   const PetscInt *ii, *jj;
532   PetscBool       done;
533 
534   PetscFunctionBegin;
535   PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
536   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatGetRowIJ");
537   PetscCall(CSRtoCOO_Private(n, ii, jj, ncoo, coo_i, coo_j));
538   PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
539   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatRestoreRowIJ");
540   PetscFunctionReturn(PETSC_SUCCESS);
541 }
542 
/* Extracts COO index arrays from a hypre CSR block. If the block lives on the
   device, its index arrays are first staged into temporary host buffers.
   Note the if/else brace is split across the preprocessor conditional below. */
static PetscErrorCode hypreCSRMatrixGetCOO_Private(hypre_CSRMatrix *A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt             n = hypre_CSRMatrixNumRows(A);
  HYPRE_Int           *ii, *jj;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(A);
  if (mem != HYPRE_MEMORY_HOST) {
    /* device-resident: copy row pointers and column indices to the host first */
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(A);
    PetscCall(PetscMalloc2(n + 1, &ii, nnz, &jj));
    hypre_TMemcpy(ii, hypre_CSRMatrixI(A), HYPRE_Int, n + 1, HYPRE_MEMORY_HOST, mem);
    hypre_TMemcpy(jj, hypre_CSRMatrixJ(A), HYPRE_Int, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
#else
  {
#endif
    /* host-resident: use hypre's arrays directly */
    ii = hypre_CSRMatrixI(A);
    jj = hypre_CSRMatrixJ(A);
  }
  PetscCall(CSRtoCOO_HYPRE_Int_Private(n, ii, jj, ncoo, coo_i, coo_j));
  if (mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree2(ii, jj)); /* free the staging buffers */
  PetscFunctionReturn(PETSC_SUCCESS);
}
568 
/* Inserts the values of the hypre CSR block H into A via MatSetValuesCOO(),
   assuming A was COO-preallocated with H's pattern in the same ordering.
   When A is a host matrix but H's values live on the device, the values are
   staged through a temporary host buffer. */
static PetscErrorCode MatSetValuesCOOFromCSRMatrix_Private(Mat A, hypre_CSRMatrix *H)
{
  PetscBool            iscpu = PETSC_TRUE;
  PetscScalar         *a;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(H);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &iscpu));
#endif
  if (iscpu && mem != HYPRE_MEMORY_HOST) {
    /* host matrix, device values: copy the values to the host first */
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(H);
    PetscCall(PetscMalloc1(nnz, &a));
    hypre_TMemcpy(a, hypre_CSRMatrixData(H), PetscScalar, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
    /* otherwise pass hypre's value array through directly */
    a = (PetscScalar *)hypre_CSRMatrixData(H);
  }
  PetscCall(MatSetValuesCOO(A, a, INSERT_VALUES));
  if (iscpu && mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree(a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
591 
592 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
593 {
594   MPI_Comm   comm = PetscObjectComm((PetscObject)A);
595   Mat        M = NULL, dH = NULL, oH = NULL, dA = NULL, oA = NULL;
596   PetscBool  ismpiaij, issbaij, isbaij;
597   Mat_HYPRE *hA;
598 
599   PetscFunctionBegin;
600   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPIBAIJ, ""));
601   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
602   if (isbaij || issbaij) { /* handle BAIJ and SBAIJ */
603     PetscBool ismpi;
604     MatType   newtype;
605 
606     PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
607     newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
608     if (reuse == MAT_REUSE_MATRIX) {
609       PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
610       PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
611       PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
612     } else if (reuse == MAT_INITIAL_MATRIX) {
613       PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
614       PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
615     } else {
616       PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
617       PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
618     }
619     PetscFunctionReturn(PETSC_SUCCESS);
620   }
621 
622   dA = A;
623   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
624   if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));
625 
626   if (reuse != MAT_REUSE_MATRIX) {
627     PetscCount coo_n;
628     PetscInt  *coo_i, *coo_j;
629 
630     PetscCall(MatCreate(comm, &M));
631     PetscCall(MatSetType(M, MATHYPRE));
632     PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
633     PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
634     PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
635 
636     hA = (Mat_HYPRE *)M->data;
637     PetscCall(MatHYPRE_CreateFromMat(A, hA));
638     PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij));
639 
640     PetscCall(MatHYPRE_CreateCOOMat(M));
641 
642     dH = hA->cooMat;
643     PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
644     if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));
645 
646     PetscCall(PetscObjectSetName((PetscObject)dH, "_internal_COO_mat_for_hypre"));
647     PetscCall(MatSeqAIJGetCOO_Private(dA, &coo_n, &coo_i, &coo_j));
648     PetscCall(MatSetPreallocationCOO(dH, coo_n, coo_i, coo_j));
649     PetscCall(PetscFree2(coo_i, coo_j));
650     if (oH) {
651       PetscCall(PetscLayoutDestroy(&oH->cmap));
652       PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oH), oA->cmap->n, oA->cmap->n, 1, &oH->cmap));
653       PetscCall(MatSeqAIJGetCOO_Private(oA, &coo_n, &coo_i, &coo_j));
654       PetscCall(MatSetPreallocationCOO(oH, coo_n, coo_i, coo_j));
655       PetscCall(PetscFree2(coo_i, coo_j));
656     }
657     hA->cooMat->assembled = PETSC_TRUE;
658 
659     M->preallocated = PETSC_TRUE;
660     PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
661     PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));
662 
663     PetscCall(MatHYPRE_AttachCOOMat(M));
664     if (reuse == MAT_INITIAL_MATRIX) *B = M;
665   } else M = *B;
666 
667   hA = (Mat_HYPRE *)M->data;
668   PetscCheck(hA->cooMat, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
669 
670   dH = hA->cooMat;
671   PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
672   if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));
673 
674   PetscScalar *a;
675   PetscCall(MatSeqAIJGetCSRAndMemType(dA, NULL, NULL, &a, NULL));
676   PetscCall(MatSetValuesCOO(dH, a, INSERT_VALUES));
677   if (oH) {
678     PetscCall(MatSeqAIJGetCSRAndMemType(oA, NULL, NULL, &a, NULL));
679     PetscCall(MatSetValuesCOO(oH, a, INSERT_VALUES));
680   }
681 
682   if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
683   PetscFunctionReturn(PETSC_SUCCESS);
684 }
685 
/* Converts a MATHYPRE matrix to (MPI/Seq)AIJ (or the matching device AIJ type
   when the hypre matrix is device-resident): COO-preallocates the AIJ blocks
   from hypre's CSR patterns, rebuilds garray from hypre's col_map_offd, and
   inserts the values via MatSetValuesCOO(). */
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat                 M, dA = NULL, oA = NULL;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dH, *oH;
  MPI_Comm            comm;
  PetscBool           ismpiaij, isseqaij;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ base types are supported");
  }
  PetscCall(MatHYPREGetParCSR(A, &parcsr));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_ParCSRMatrixMemoryLocation(parcsr)) {
    PetscBool isaij;

    /* device-resident hypre matrix: pick the matching device AIJ type for a generic MATAIJ request */
    PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
    if (isaij) {
      PetscMPIInt size;

      PetscCallMPI(MPI_Comm_size(comm, &size));
  #if defined(HYPRE_USING_HIP)
      mtype = size > 1 ? MATMPIAIJHIPSPARSE : MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
      mtype = size > 1 ? MATMPIAIJCUSPARSE : MATSEQAIJCUSPARSE;
  #else
      mtype = size > 1 ? MATMPIAIJ : MATSEQAIJ;
  #endif
    }
  }
#endif
  dH = hypre_ParCSRMatrixDiag(parcsr);
  oH = hypre_ParCSRMatrixOffd(parcsr);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, mtype));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatMPIAIJSetPreallocation(M, 0, NULL, 0, NULL));

    /* split M into its diagonal/off-diagonal blocks (dA/oA) */
    dA = M;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));

    /* COO-preallocate the diagonal block from hypre's diagonal CSR pattern */
    PetscCall(hypreCSRMatrixGetCOO_Private(dH, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dA, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (ismpiaij) {
      HYPRE_Int nc = hypre_CSRMatrixNumCols(oH);

      /* resize oA's column layout to the compressed off-diagonal width, then preallocate */
      PetscCall(PetscLayoutDestroy(&oA->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oA), nc, nc, 1, &oA->cmap));
      PetscCall(hypreCSRMatrixGetCOO_Private(oH, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oA, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));

      /* garray: rebuild the compressed-to-global column map from hypre's col_map_offd */
      Mat_MPIAIJ   *aij    = (Mat_MPIAIJ *)M->data;
      HYPRE_BigInt *harray = hypre_ParCSRMatrixColMapOffd(parcsr);
      PetscInt     *garray;

      PetscCall(PetscFree(aij->garray));
      PetscCall(PetscMalloc1(nc, &garray));
      for (HYPRE_Int i = 0; i < nc; i++) garray[i] = (PetscInt)harray[i];
      aij->garray = garray;
      PetscCall(MatSetUpMultiply_MPIAIJ(M));
    }
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  /* insert the values (works for both the fresh and the reused matrix) */
  dA = M;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
  PetscCall(MatSetValuesCOOFromCSRMatrix_Private(dA, dH));
  if (oA) PetscCall(MatSetValuesCOOFromCSRMatrix_Private(oA, oH));
  M->assembled = PETSC_TRUE;
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}
771 
772 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
773 {
774   hypre_ParCSRMatrix *tA;
775   hypre_CSRMatrix    *hdiag, *hoffd;
776   Mat_SeqAIJ         *diag, *offd;
777   PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
778   MPI_Comm            comm = PetscObjectComm((PetscObject)A);
779   PetscBool           ismpiaij, isseqaij;
780   PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
781   HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
782   PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
783   PetscBool           iscuda, iship;
784 #if defined(PETSC_HAVE_DEVICE) && defined(PETSC_HAVE_HYPRE_DEVICE)
785   PetscBool boundtocpu = A->boundtocpu;
786 #else
787   PetscBool boundtocpu = PETSC_TRUE;
788 #endif
789 
790   PetscFunctionBegin;
791   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
792   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
793   PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
794   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJHIPSPARSE, MATMPIAIJCUSPARSE, ""));
795   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJCUSPARSE, MATMPIAIJHIPSPARSE, ""));
796   PetscHYPREInitialize();
797   if (ismpiaij) {
798     Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
799 
800     diag = (Mat_SeqAIJ *)a->A->data;
801     offd = (Mat_SeqAIJ *)a->B->data;
802     if (!boundtocpu && (iscuda || iship)) {
803 #if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
804       if (iscuda) {
805         sameint = PETSC_TRUE;
806         PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
807         PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
808       }
809 #endif
810 #if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
811       if (iship) {
812         sameint = PETSC_TRUE;
813         PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
814         PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
815       }
816 #endif
817     } else {
818       boundtocpu = PETSC_TRUE;
819       pdi        = diag->i;
820       pdj        = diag->j;
821       poi        = offd->i;
822       poj        = offd->j;
823       if (sameint) {
824         hdi = (HYPRE_Int *)pdi;
825         hdj = (HYPRE_Int *)pdj;
826         hoi = (HYPRE_Int *)poi;
827         hoj = (HYPRE_Int *)poj;
828       }
829     }
830     garray = a->garray;
831     noffd  = a->B->cmap->N;
832     dnnz   = diag->nz;
833     onnz   = offd->nz;
834   } else {
835     diag = (Mat_SeqAIJ *)A->data;
836     offd = NULL;
837     if (!boundtocpu && (iscuda || iship)) {
838 #if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
839       if (iscuda) {
840         sameint = PETSC_TRUE;
841         PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
842       }
843 #endif
844 #if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
845       if (iship) {
846         sameint = PETSC_TRUE;
847         PetscCall(MatSeqAIJHIPSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
848       }
849 #endif
850     } else {
851       boundtocpu = PETSC_TRUE;
852       pdi        = diag->i;
853       pdj        = diag->j;
854       if (sameint) {
855         hdi = (HYPRE_Int *)pdi;
856         hdj = (HYPRE_Int *)pdj;
857       }
858     }
859     garray = NULL;
860     noffd  = 0;
861     dnnz   = diag->nz;
862     onnz   = 0;
863   }
864 
865   /* create a temporary ParCSR */
866   if (HYPRE_AssumedPartitionCheck()) {
867     PetscMPIInt myid;
868 
869     PetscCallMPI(MPI_Comm_rank(comm, &myid));
870     row_starts = A->rmap->range + myid;
871     col_starts = A->cmap->range + myid;
872   } else {
873     row_starts = A->rmap->range;
874     col_starts = A->cmap->range;
875   }
876   tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, noffd, dnnz, onnz);
877 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
878   hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
879   hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
880 #endif
881 
882   /* set diagonal part */
883   hdiag = hypre_ParCSRMatrixDiag(tA);
884   if (!sameint) { /* malloc CSR pointers */
885     PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
886     for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)pdi[i];
887     for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)pdj[i];
888   }
889   hypre_CSRMatrixI(hdiag)           = hdi;
890   hypre_CSRMatrixJ(hdiag)           = hdj;
891   hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a;
892   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
893   hypre_CSRMatrixSetRownnz(hdiag);
894   hypre_CSRMatrixSetDataOwner(hdiag, 0);
895 
896   /* set off-diagonal part */
897   hoffd = hypre_ParCSRMatrixOffd(tA);
898   if (offd) {
899     if (!sameint) { /* malloc CSR pointers */
900       PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
901       for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)poi[i];
902       for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)poj[i];
903     }
904     hypre_CSRMatrixI(hoffd)           = hoi;
905     hypre_CSRMatrixJ(hoffd)           = hoj;
906     hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
907     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
908     hypre_CSRMatrixSetRownnz(hoffd);
909     hypre_CSRMatrixSetDataOwner(hoffd, 0);
910   }
911 #if defined(PETSC_HAVE_HYPRE_DEVICE)
912   PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, !boundtocpu ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
913 #else
914   #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
915   PetscCallExternal(hypre_ParCSRMatrixInitialize, tA);
916   #else
917   PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, HYPRE_MEMORY_HOST);
918   #endif
919 #endif
920   hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
921   hypre_ParCSRMatrixSetNumNonzeros(tA);
922   hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
923   if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallExternal(hypre_MatvecCommPkgCreate, tA);
924   *hA = tA;
925   PetscFunctionReturn(PETSC_SUCCESS);
926 }
927 
/* Companion of MatAIJGetParCSR_Private(): frees the converted index arrays that
   were allocated when PetscInt != HYPRE_Int, detaches the pointers that alias
   PETSc-owned memory, and destroys the temporary ParCSR wrapper. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* CUSPARSE matrices handed out HYPRE_Int-compatible device arrays, so no copies were made */
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  if (iscuda) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  /* NULL the aliased pointers so hypre_ParCSRMatrixDestroy() does not free PETSc-owned data */
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL; /* aliased A's garray */
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
971 
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* remember ownership flags; restored below because BoomerAMG steals them */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device-capable entry point (also computes num nonzeros itself) */
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallExternal(hypre_BoomerAMGBuildCoarseOperator, hR, hA, hP, hRAP);
  PetscCallExternal(hypre_ParCSRMatrixSetNumNonzeros, *hRAP);
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1004 
1005 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
1006 {
1007   Mat                 B;
1008   hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
1009   Mat_Product        *product = C->product;
1010 
1011   PetscFunctionBegin;
1012   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1013   PetscCall(MatAIJGetParCSR_Private(P, &hP));
1014   PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
1015   PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));
1016 
1017   PetscCall(MatHeaderMerge(C, &B));
1018   C->product = product;
1019 
1020   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1021   PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
1022   PetscFunctionReturn(PETSC_SUCCESS);
1023 }
1024 
1025 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
1026 {
1027   PetscFunctionBegin;
1028   PetscCall(MatSetType(C, MATAIJ));
1029   C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
1030   C->ops->productnumeric = MatProductNumeric_PtAP;
1031   PetscFunctionReturn(PETSC_SUCCESS);
1032 }
1033 
1034 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
1035 {
1036   Mat                 B;
1037   Mat_HYPRE          *hP;
1038   hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
1039   HYPRE_Int           type;
1040   MPI_Comm            comm = PetscObjectComm((PetscObject)A);
1041   PetscBool           ishypre;
1042 
1043   PetscFunctionBegin;
1044   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
1045   PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
1046   hP = (Mat_HYPRE *)P->data;
1047   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
1048   PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1049   PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
1050 
1051   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1052   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
1053   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1054 
1055   /* create temporary matrix and merge to C */
1056   PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
1057   PetscCall(MatHeaderMerge(C, &B));
1058   PetscFunctionReturn(PETSC_SUCCESS);
1059 }
1060 
1061 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
1062 {
1063   Mat                 B;
1064   hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
1065   Mat_HYPRE          *hA, *hP;
1066   PetscBool           ishypre;
1067   HYPRE_Int           type;
1068 
1069   PetscFunctionBegin;
1070   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
1071   PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
1072   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1073   PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
1074   hA = (Mat_HYPRE *)A->data;
1075   hP = (Mat_HYPRE *)P->data;
1076   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1077   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1078   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
1079   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1080   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
1081   PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
1082   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
1083   PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
1084   PetscCall(MatHeaderMerge(C, &B));
1085   PetscFunctionReturn(PETSC_SUCCESS);
1086 }
1087 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device-capable SpGEMM entry point */
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1106 
1107 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
1108 {
1109   Mat                 D;
1110   hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
1111   Mat_Product        *product = C->product;
1112 
1113   PetscFunctionBegin;
1114   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1115   PetscCall(MatAIJGetParCSR_Private(B, &hB));
1116   PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
1117   PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));
1118 
1119   PetscCall(MatHeaderMerge(C, &D));
1120   C->product = product;
1121 
1122   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1123   PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
1124   PetscFunctionReturn(PETSC_SUCCESS);
1125 }
1126 
1127 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
1128 {
1129   PetscFunctionBegin;
1130   PetscCall(MatSetType(C, MATAIJ));
1131   C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
1132   C->ops->productnumeric = MatProductNumeric_AB;
1133   PetscFunctionReturn(PETSC_SUCCESS);
1134 }
1135 
/* Numeric C = A * B with both operands stored as MATHYPRE; the product is
   computed by hypre and the result replaces C's header. */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
  Mat_HYPRE          *hA, *hB;
  PetscBool           ishypre;
  HYPRE_Int           type;
  Mat_Product        *product;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hB = (Mat_HYPRE *)B->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hB->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hB->ij, (void **)&Bparcsr);
  PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
  PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  PetscCall(MatHeaderReplace(C, &D));
  C->product             = product;
  /* reinstall the function pointers clobbered by the header replacement */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1170 
1171 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
1172 {
1173   Mat                 E;
1174   hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;
1175 
1176   PetscFunctionBegin;
1177   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1178   PetscCall(MatAIJGetParCSR_Private(B, &hB));
1179   PetscCall(MatAIJGetParCSR_Private(C, &hC));
1180   PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
1181   PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
1182   PetscCall(MatHeaderMerge(D, &E));
1183   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1184   PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
1185   PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
1186   PetscFunctionReturn(PETSC_SUCCESS);
1187 }
1188 
/* Symbolic phase for D = A^T * B * C via hypre: only fixes the output type;
   hypre computes the product entirely in the numeric phase. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1195 
/* Symbolic phase for C = A*B with MATHYPRE output: just install the numeric hook. */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1202 
1203 static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1204 {
1205   Mat_Product *product = C->product;
1206   PetscBool    Ahypre;
1207 
1208   PetscFunctionBegin;
1209   PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
1210   if (Ahypre) { /* A is a Hypre matrix */
1211     PetscCall(MatSetType(C, MATHYPRE));
1212     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1213     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1214     PetscFunctionReturn(PETSC_SUCCESS);
1215   }
1216   PetscFunctionReturn(PETSC_SUCCESS);
1217 }
1218 
/* Symbolic phase for C = P^T*A*P with hypre backend: just install the numeric hook. */
static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1225 
/* Configure C for the PtAP product. When A itself is MATHYPRE the result is
   MATHYPRE; otherwise A is AIJ and P is MATHYPRE, and a runtime option selects
   whether the result is stored in AIJ or MATHYPRE format. */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    flg;
  PetscInt     type        = 0; /* default output type: "aij" */
  const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
  PetscInt     ntype       = 4;
  Mat          A           = product->A;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    /* option names differ depending on whether the user called MatPtAP() or the MatProduct API */
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  } else {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  }

  /* types 0-2 ("aij", "seqaij", "mpiaij") all map to MATAIJ; 3 is MATHYPRE */
  if (type == 0 || type == 1 || type == 2) {
    PetscCall(MatSetType(C, MATAIJ));
  } else if (type == 3) {
    PetscCall(MatSetType(C, MATHYPRE));
  } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1266 
1267 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1268 {
1269   Mat_Product *product = C->product;
1270 
1271   PetscFunctionBegin;
1272   switch (product->type) {
1273   case MATPRODUCT_AB:
1274     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1275     break;
1276   case MATPRODUCT_PtAP:
1277     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1278     break;
1279   default:
1280     break;
1281   }
1282   PetscFunctionReturn(PETSC_SUCCESS);
1283 }
1284 
1285 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1286 {
1287   PetscFunctionBegin;
1288   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
1289   PetscFunctionReturn(PETSC_SUCCESS);
1290 }
1291 
1292 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1293 {
1294   PetscFunctionBegin;
1295   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
1296   PetscFunctionReturn(PETSC_SUCCESS);
1297 }
1298 
1299 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1300 {
1301   PetscFunctionBegin;
1302   if (y != z) PetscCall(VecCopy(y, z));
1303   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
1304   PetscFunctionReturn(PETSC_SUCCESS);
1305 }
1306 
1307 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1308 {
1309   PetscFunctionBegin;
1310   if (y != z) PetscCall(VecCopy(y, z));
1311   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
1312   PetscFunctionReturn(PETSC_SUCCESS);
1313 }
1314 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx, *hy;

  PetscFunctionBegin;
  /* hA->x is the column-space work vector and hA->b the row-space one; for the
     transpose product the roles of input and output are swapped */
  if (trans) {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
    /* when b == 0 the output vector is write-only (its current values are not read) */
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hx);
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hy);
  } else {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hx);
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hy);
  }
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
  if (trans) {
    PetscCallExternal(hypre_ParCSRMatrixMatvecT, a, parcsr, hx, b, hy);
  } else {
    PetscCallExternal(hypre_ParCSRMatrixMatvec, a, parcsr, hx, b, hy);
  }
  /* pop both work vectors (restores the PETSc arrays pushed above) */
  PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1346 
/* Destroy a MATHYPRE matrix: release work vectors, COO support, the hypre
   IJMatrix, the cached communicator, and all composed query functions. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
  PetscCall(MatHYPRE_DestroyCOOMat(A)); /* must be called before destroying the individual CSR */
  if (hA->ij) {
    /* when the inner ParCSR is not owned (inner_free is false), detach it so
       HYPRE_IJMatrixDestroy does not free memory that belongs to someone else */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
  }
  if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));

  PetscCall(MatStashDestroy_Private(&A->stash));
  PetscCall(PetscFree(hA->array));
  if (hA->rows_d) PetscStackCallExternalVoid("hypre_Free", hypre_Free(hA->rows_d, HYPRE_MEMORY_DEVICE));

  /* remove every composed method installed at creation/convert time */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1380 
1381 static PetscErrorCode MatSetUp_HYPRE(Mat A)
1382 {
1383   PetscFunctionBegin;
1384   if (!A->preallocated) PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
1385   PetscFunctionReturn(PETSC_SUCCESS);
1386 }
1387 
//TODO FIX hypre_CSRMatrixMatvecOutOfPlace
#if defined(PETSC_HAVE_HYPRE_DEVICE)
/* Bind/unbind the matrix (and its work vectors) to the CPU, migrating the
   assembled hypre data between host and device memory as needed. */
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;

  PetscFunctionBegin;
  A->boundtocpu = bind;
  /* only migrate if the matrix has been assembled and actually lives elsewhere */
  if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
    PetscCallExternal(hypre_ParCSRMatrixMigrate, parcsr, hmem);
  }
  if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
  if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
1407 
/* Finish assembly: drain the off-process stash into the hypre IJMatrix, call
   hypre's assemble, recreate the auxiliary matrix destroyed by the assembly,
   and set up the matvec communication package and work vectors. */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
  PetscMPIInt  n;
  PetscInt     i, j, rstart, ncols, flg;
  PetscInt    *row, *col;
  PetscScalar *val;

  PetscFunctionBegin;
  PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* receive and insert the values stashed for this rank by other processes */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));

        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&A->stash));
  }

  PetscCallExternal(HYPRE_IJMatrixAssemble, hA->ij);
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!A->sortedfull) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    PetscCallExternal(hypre_AuxParCSRMatrixDestroy, aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      PetscCallExternal(hypre_AuxParCSRMatrixInitialize, aux_matrix);
#else
      PetscCallExternal(hypre_AuxParCSRMatrixInitialize_v2, aux_matrix, HYPRE_MEMORY_HOST);
#endif
    }
  }
  {
    hypre_ParCSRMatrix *parcsr;

    /* make sure the communication package for matvec exists */
    PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallExternal(hypre_MatvecCommPkgCreate, parcsr);
  }
  /* lazily create the work vectors used by the mult kernels */
  if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
  if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1476 
1477 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1478 {
1479   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1480 
1481   PetscFunctionBegin;
1482   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");
1483 
1484   if (hA->array_size >= size) {
1485     *array = hA->array;
1486   } else {
1487     PetscCall(PetscFree(hA->array));
1488     hA->array_size = size;
1489     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1490     *array = hA->array;
1491   }
1492 
1493   hA->array_available = PETSC_FALSE;
1494   PetscFunctionReturn(PETSC_SUCCESS);
1495 }
1496 
1497 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1498 {
1499   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1500 
1501   PetscFunctionBegin;
1502   *array              = NULL;
1503   hA->array_available = PETSC_TRUE;
1504   PetscFunctionReturn(PETSC_SUCCESS);
1505 }
1506 
/*
   MatSetValues_HYPRE - insert (INSERT_VALUES) or add (ADD_VALUES) a logically dense
   nr x nc block of values into the underlying HYPRE_IJMatrix.

   Negative column indices are compacted away up front; rows with negative indices
   are skipped. For INSERT_VALUES, off-process rows go through the PETSc stash and
   are communicated at assembly time; for ADD_VALUES, off-process rows are handed
   directly to HYPRE_IJMatrixAddToValues (the PETSc stash is not used in that path).
*/
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
  PetscScalar   *vals = (PetscScalar *)v;
  HYPRE_Complex *sscr;    /* scratch for values cast to HYPRE_Complex */
  PetscInt      *cscr[2]; /* [0]: compacted column indices, [1]: their original positions */
  PetscInt       i, nzc;
  PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
  void          *array = NULL;

  PetscFunctionBegin;
  /* single scratch allocation, laid out as [cscr[0] | cscr[1] | sscr] */
  PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
  cscr[0] = (PetscInt *)array;
  cscr[1] = ((PetscInt *)array) + nc;
  sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
  /* drop negative (to-be-ignored) column indices, remembering original positions */
  for (i = 0, nzc = 0; i < nc; i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc]   = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) { /* no admissible columns: nothing to do */
    PetscCall(MatRestoreArray_HYPRE(A, &array));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscCallExternal(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
    PetscCallExternal(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
  }
#endif

  if (ins == ADD_VALUES) {
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          if (hA->donotstash) continue;
        }
        /* guard against HYPRE_Int being narrower than PetscInt */
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        /* off-process rows (if any survived the checks above) are passed to hypre here as well */
        PetscCallExternal(HYPRE_IJMatrixAddToValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
      }
      vals += nc; /* advance to the next row of the dense input block */
    }
  } else { /* INSERT_VALUES */
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
        }
        /* local values */
        else
          PetscCallExternal(HYPRE_IJMatrixSetValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
      }
      vals += nc; /* advance to the next row of the dense input block */
    }
  }

  PetscCall(MatRestoreArray_HYPRE(A, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1585 
1586 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1587 {
1588   Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
1589   HYPRE_Int  *hdnnz, *honnz;
1590   PetscInt    i, rs, re, cs, ce, bs;
1591   PetscMPIInt size;
1592 
1593   PetscFunctionBegin;
1594   PetscCall(PetscLayoutSetUp(A->rmap));
1595   PetscCall(PetscLayoutSetUp(A->cmap));
1596   rs = A->rmap->rstart;
1597   re = A->rmap->rend;
1598   cs = A->cmap->rstart;
1599   ce = A->cmap->rend;
1600   if (!hA->ij) {
1601     PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rs, re - 1, cs, ce - 1, &hA->ij);
1602     PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1603   } else {
1604     HYPRE_BigInt hrs, hre, hcs, hce;
1605     PetscCallExternal(HYPRE_IJMatrixGetLocalRange, hA->ij, &hrs, &hre, &hcs, &hce);
1606     PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
1607     PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
1608   }
1609   PetscCall(MatHYPRE_DestroyCOOMat(A));
1610   PetscCall(MatGetBlockSize(A, &bs));
1611   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
1612   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;
1613 
1614   if (!dnnz) {
1615     PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
1616     for (i = 0; i < A->rmap->n; i++) hdnnz[i] = dnz;
1617   } else {
1618     hdnnz = (HYPRE_Int *)dnnz;
1619   }
1620   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1621   if (size > 1) {
1622     hypre_AuxParCSRMatrix *aux_matrix;
1623     if (!onnz) {
1624       PetscCall(PetscMalloc1(A->rmap->n, &honnz));
1625       for (i = 0; i < A->rmap->n; i++) honnz[i] = onz;
1626     } else honnz = (HYPRE_Int *)onnz;
1627     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1628        they assume the user will input the entire row values, properly sorted
1629        In PETSc, we don't make such an assumption and set this flag to 1,
1630        unless the option MAT_SORTED_FULL is set to true.
1631        Also, to avoid possible memory leaks, we destroy and recreate the translator
1632        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1633        the IJ matrix for us */
1634     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1635     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1636     hypre_IJMatrixTranslator(hA->ij) = NULL;
1637     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, hA->ij, hdnnz, honnz);
1638     aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1639     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
1640   } else {
1641     honnz = NULL;
1642     PetscCallExternal(HYPRE_IJMatrixSetRowSizes, hA->ij, hdnnz);
1643   }
1644 
1645   /* reset assembled flag and call the initialize method */
1646   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1647 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1648   PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1649 #else
1650   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, hA->ij, HYPRE_MEMORY_HOST);
1651 #endif
1652   if (!dnnz) PetscCall(PetscFree(hdnnz));
1653   if (!onnz && honnz) PetscCall(PetscFree(honnz));
1654   /* Match AIJ logic */
1655   A->preallocated = PETSC_TRUE;
1656   A->assembled    = PETSC_FALSE;
1657   PetscFunctionReturn(PETSC_SUCCESS);
1658 }
1659 
1660 /*@C
1661   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1662 
1663   Collective
1664 
1665   Input Parameters:
1666 + A    - the matrix
1667 . dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1668           (same value is used for all local rows)
1669 . dnnz - array containing the number of nonzeros in the various rows of the
1670           DIAGONAL portion of the local submatrix (possibly different for each row)
          or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
1672           The size of this array is equal to the number of local rows, i.e `m`.
1673           For matrices that will be factored, you must leave room for (and set)
1674           the diagonal entry even if it is zero.
1675 . onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1676           submatrix (same value is used for all local rows).
1677 - onnz - array containing the number of nonzeros in the various rows of the
1678           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
1680           structure. The size of this array is equal to the number
1681           of local rows, i.e `m`.
1682 
1683   Level: intermediate
1684 
1685   Note:
1686   If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, `onz` and `onnz` are ignored.
1687 
1688 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
1689 @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  /* dispatch to the type-specific implementation; PetscTryMethod is a no-op
     when the matrix type does not provide MatHYPRESetPreallocation_C */
  PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1698 
1699 /*@C
1700   MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`
1701 
1702   Collective
1703 
1704   Input Parameters:
1705 + parcsr   - the pointer to the `hypre_ParCSRMatrix`
1706 . mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
- copymode - PETSc copying options, see `PetscCopyMode`
1708 
1709   Output Parameter:
1710 . A - the matrix
1711 
1712   Level: intermediate
1713 
1714 .seealso: [](ch_matrices), `Mat`, `MatHYPRE`, `PetscCopyMode`
1715 @*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
{
  Mat        T;
  Mat_HYPRE *hA;
  MPI_Comm   comm;
  PetscInt   rstart, rend, cstart, cend, M, N;
  PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  /* decide the requested output type */
  PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
  PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
  PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
  PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
  PetscCall(PetscStrcmp(mtype, MATIS, &isis));
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: hypre's Last* indices are inclusive, PETSc ranges are
     exclusive at the top, so shift by one (clamped to the global sizes) */
  rend++;
  cend++;
  rend = PetscMin(rend, M);
  cend = PetscMin(cend, N);

  /* create PETSc matrix with MatHYPRE */
  PetscCall(MatCreate(comm, &T));
  PetscCall(MatSetSizes(T, rend - rstart, cend - cstart, M, N));
  PetscCall(MatSetType(T, MATHYPRE));
  hA = (Mat_HYPRE *)T->data;

  /* create HYPRE_IJMatrix */
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);

  /* create new ParCSR object if needed; from here on we own the clone, so
     the copymode degrades to PETSC_OWN_POINTER */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
    /* older hypre clones only the structure (flag 0); copy the values by hand */
    hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
#endif
    parcsr   = new_parcsr;
    copymode = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated              = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscCallExternal(HYPRE_IJMatrixInitialize,hA->ij);
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, NULL, &map));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, &map, NULL));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A = T;
    PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
      PetscCall(MatDestroy(&T));
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
      *A = T;
    }
  } else if (isis) {
    PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    PetscCall(MatDestroy(&T));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1836 
1837 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1838 {
1839   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1840   HYPRE_Int  type;
1841 
1842   PetscFunctionBegin;
1843   PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
1844   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1845   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1846   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)parcsr);
1847   PetscFunctionReturn(PETSC_SUCCESS);
1848 }
1849 
1850 /*@C
1851   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1852 
1853   Not Collective
1854 
1855   Input Parameter:
1856 . A - the `MATHYPRE` object
1857 
1858   Output Parameter:
1859 . parcsr - the pointer to the `hypre_ParCSRMatrix`
1860 
1861   Level: intermediate
1862 
1863 .seealso: [](ch_matrices), `Mat`, `MatHYPRE`, `PetscCopyMode`
1864 @*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  /* PetscUseMethod errors if the matrix type does not provide MatHYPREGetParCSR_C */
  PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1873 
1874 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1875 {
1876   hypre_ParCSRMatrix *parcsr;
1877   hypre_CSRMatrix    *ha;
1878   PetscInt            rst;
1879 
1880   PetscFunctionBegin;
1881   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented with non-square diagonal blocks");
1882   PetscCall(MatGetOwnershipRange(A, &rst, NULL));
1883   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1884   if (missing) *missing = PETSC_FALSE;
1885   if (dd) *dd = -1;
1886   ha = hypre_ParCSRMatrixDiag(parcsr);
1887   if (ha) {
1888     PetscInt   size, i;
1889     HYPRE_Int *ii, *jj;
1890 
1891     size = hypre_CSRMatrixNumRows(ha);
1892     ii   = hypre_CSRMatrixI(ha);
1893     jj   = hypre_CSRMatrixJ(ha);
1894     for (i = 0; i < size; i++) {
1895       PetscInt  j;
1896       PetscBool found = PETSC_FALSE;
1897 
1898       for (j = ii[i]; j < ii[i + 1] && !found; j++) found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1899 
1900       if (!found) {
1901         PetscCall(PetscInfo(A, "Matrix is missing local diagonal entry %" PetscInt_FMT "\n", i));
1902         if (missing) *missing = PETSC_TRUE;
1903         if (dd) *dd = i + rst;
1904         PetscFunctionReturn(PETSC_SUCCESS);
1905       }
1906     }
1907     if (!size) {
1908       PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1909       if (missing) *missing = PETSC_TRUE;
1910       if (dd) *dd = rst;
1911     }
1912   } else {
1913     PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1914     if (missing) *missing = PETSC_TRUE;
1915     if (dd) *dd = rst;
1916   }
1917   PetscFunctionReturn(PETSC_SUCCESS);
1918 }
1919 
1920 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1921 {
1922   hypre_ParCSRMatrix *parcsr;
1923 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1924   hypre_CSRMatrix *ha;
1925 #endif
1926   HYPRE_Complex hs;
1927 
1928   PetscFunctionBegin;
1929   PetscCall(PetscHYPREScalarCast(s, &hs));
1930   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1931 #if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
1932   PetscCallExternal(hypre_ParCSRMatrixScale, parcsr, hs);
1933 #else /* diagonal part */
1934   ha = hypre_ParCSRMatrixDiag(parcsr);
1935   if (ha) {
1936     PetscInt       size, i;
1937     HYPRE_Int     *ii;
1938     HYPRE_Complex *a;
1939 
1940     size = hypre_CSRMatrixNumRows(ha);
1941     a    = hypre_CSRMatrixData(ha);
1942     ii   = hypre_CSRMatrixI(ha);
1943     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1944   }
1945   /* off-diagonal part */
1946   ha = hypre_ParCSRMatrixOffd(parcsr);
1947   if (ha) {
1948     PetscInt       size, i;
1949     HYPRE_Int     *ii;
1950     HYPRE_Complex *a;
1951 
1952     size = hypre_CSRMatrixNumRows(ha);
1953     a    = hypre_CSRMatrixData(ha);
1954     ii   = hypre_CSRMatrixI(ha);
1955     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1956   }
1957 #endif
1958   PetscFunctionReturn(PETSC_SUCCESS);
1959 }
1960 
1961 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1962 {
1963   hypre_ParCSRMatrix *parcsr;
1964   HYPRE_Int          *lrows;
1965   PetscInt            rst, ren, i;
1966 
1967   PetscFunctionBegin;
1968   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1969   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1970   PetscCall(PetscMalloc1(numRows, &lrows));
1971   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1972   for (i = 0; i < numRows; i++) {
1973     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1974     lrows[i] = rows[i] - rst;
1975   }
1976   PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows);
1977   PetscCall(PetscFree(lrows));
1978   PetscFunctionReturn(PETSC_SUCCESS);
1979 }
1980 
1981 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1982 {
1983   PetscFunctionBegin;
1984   if (ha) {
1985     HYPRE_Int     *ii, size;
1986     HYPRE_Complex *a;
1987 
1988     size = hypre_CSRMatrixNumRows(ha);
1989     a    = hypre_CSRMatrixData(ha);
1990     ii   = hypre_CSRMatrixI(ha);
1991 
1992     if (a) PetscCall(PetscArrayzero(a, ii[size]));
1993   }
1994   PetscFunctionReturn(PETSC_SUCCESS);
1995 }
1996 
1997 static PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1998 {
1999   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2000 
2001   PetscFunctionBegin;
2002   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
2003     PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0);
2004   } else {
2005     hypre_ParCSRMatrix *parcsr;
2006 
2007     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2008     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
2009     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
2010   }
2011   PetscFunctionReturn(PETSC_SUCCESS);
2012 }
2013 
/* Zero the given (process-local) rows of one CSR block, writing `diag` on any
   entry whose column index equals the row index (the caller passes 0.0 for the
   off-diagonal block). Dispatches to a device kernel when the CSR data lives
   in device memory. */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
{
  PetscInt       ii;
  HYPRE_Int     *i, *j;
  HYPRE_Complex *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(PETSC_SUCCESS);

  i = hypre_CSRMatrixI(hA);
  j = hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hA)) {
  #if defined(HYPRE_USING_CUDA)
    MatZeroRows_CUDA(N, rows, i, j, a, diag);
  #elif defined(HYPRE_USING_HIP)
    MatZeroRows_HIP(N, rows, i, j, a, diag);
  #elif defined(PETSC_HAVE_KOKKOS)
    MatZeroRows_Kokkos(N, rows, i, j, a, diag);
  #else
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for MatZeroRows on a hypre matrix in this memory location");
  #endif
  } else
#endif
  {
    /* host path: for each requested row, set the diagonal entry to diag and
       everything else to zero */
    for (ii = 0; ii < N; ii++) {
      HYPRE_Int jj, ibeg, iend, irow;

      irow = rows[ii];
      ibeg = i[irow];
      iend = i[irow + 1];
      for (jj = ibeg; jj < iend; jj++)
        if (j[jj] == irow) a[jj] = diag;
        else a[jj] = 0.0;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2053 
2054 static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2055 {
2056   hypre_ParCSRMatrix *parcsr;
2057   PetscInt           *lrows, len, *lrows2;
2058   HYPRE_Complex       hdiag;
2059 
2060   PetscFunctionBegin;
2061   PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
2062   PetscCall(PetscHYPREScalarCast(diag, &hdiag));
2063   /* retrieve the internal matrix */
2064   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2065   /* get locally owned rows */
2066   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));
2067 
2068 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2069   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hypre_ParCSRMatrixDiag(parcsr))) {
2070     Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2071     PetscInt   m;
2072     PetscCall(MatGetLocalSize(A, &m, NULL));
2073     if (!hA->rows_d) {
2074       hA->rows_d = hypre_TAlloc(PetscInt, m, HYPRE_MEMORY_DEVICE);
2075       if (m) PetscCheck(hA->rows_d, PETSC_COMM_SELF, PETSC_ERR_MEM, "HYPRE_TAlloc failed");
2076     }
2077     PetscCheck(len <= m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many rows in rows[]");
2078     PetscStackCallExternalVoid("hypre_Memcpy", hypre_Memcpy(hA->rows_d, lrows, sizeof(PetscInt) * len, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST));
2079     lrows2 = hA->rows_d;
2080   } else
2081 #endif
2082   {
2083     lrows2 = lrows;
2084   }
2085 
2086   /* zero diagonal part */
2087   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows2, hdiag));
2088   /* zero off-diagonal part */
2089   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows2, 0.0));
2090 
2091   PetscCall(PetscFree(lrows));
2092   PetscFunctionReturn(PETSC_SUCCESS);
2093 }
2094 
2095 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
2096 {
2097   PetscFunctionBegin;
2098   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
2099 
2100   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2101   PetscFunctionReturn(PETSC_SUCCESS);
2102 }
2103 
2104 static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2105 {
2106   hypre_ParCSRMatrix *parcsr;
2107   HYPRE_Int           hnz;
2108 
2109   PetscFunctionBegin;
2110   /* retrieve the internal matrix */
2111   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2112   /* call HYPRE API */
2113   PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2114   if (nz) *nz = (PetscInt)hnz;
2115   PetscFunctionReturn(PETSC_SUCCESS);
2116 }
2117 
2118 static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2119 {
2120   hypre_ParCSRMatrix *parcsr;
2121   HYPRE_Int           hnz;
2122 
2123   PetscFunctionBegin;
2124   /* retrieve the internal matrix */
2125   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2126   /* call HYPRE API */
2127   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2128   PetscCallExternal(HYPRE_ParCSRMatrixRestoreRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2129   PetscFunctionReturn(PETSC_SUCCESS);
2130 }
2131 
2132 static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2133 {
2134   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2135   PetscInt   i;
2136 
2137   PetscFunctionBegin;
2138   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2139   /* Ignore negative row indices
2140    * And negative column indices should be automatically ignored in hypre
2141    * */
2142   for (i = 0; i < m; i++) {
2143     if (idxm[i] >= 0) {
2144       HYPRE_Int hn = (HYPRE_Int)n;
2145       PetscCallExternal(HYPRE_IJMatrixGetValues, hA->ij, 1, &hn, (HYPRE_BigInt *)&idxm[i], (HYPRE_BigInt *)idxn, (HYPRE_Complex *)(v + i * n));
2146     }
2147   }
2148   PetscFunctionReturn(PETSC_SUCCESS);
2149 }
2150 
2151 static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2152 {
2153   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2154 
2155   PetscFunctionBegin;
2156   switch (op) {
2157   case MAT_NO_OFF_PROC_ENTRIES:
2158     if (flg) PetscCallExternal(HYPRE_IJMatrixSetMaxOffProcElmts, hA->ij, 0);
2159     break;
2160   case MAT_IGNORE_OFF_PROC_ENTRIES:
2161     hA->donotstash = flg;
2162     break;
2163   default:
2164     break;
2165   }
2166   PetscFunctionReturn(PETSC_SUCCESS);
2167 }
2168 
/* Viewer for MATHYPRE: non-native formats convert to AIJ (no copy) and reuse
   its viewer; PETSC_VIEWER_NATIVE writes hypre's own ASCII files, one per rank. */
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(view, &format));
  /* info formats print nothing for this type */
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
  if (format != PETSC_VIEWER_NATIVE) {
    Mat                 B;
    hypre_ParCSRMatrix *parcsr;
    PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;

    /* wrap the ParCSR data in an AIJ matrix (PETSC_USE_POINTER: no copy) and view that */
    PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
    PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
    PetscCall(MatGetOperation(B, MATOP_VIEW, (void (**)(void)) & mview));
    PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
    PetscCall((*mview)(B, view));
    PetscCall(MatDestroy(&B));
  } else {
    Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char *filename;

    /* HYPRE uses only text files */
    PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
    PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
    PetscCall(PetscViewerFileGetName(view, &filename));
    PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename);
    PetscCallMPI(MPI_Comm_size(hA->comm, &size));
    /* report the per-rank file names produced by hypre */
    if (size > 1) {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
    } else {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2207 
2208 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2209 {
2210   hypre_ParCSRMatrix *acsr, *bcsr;
2211 
2212   PetscFunctionBegin;
2213   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2214     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2215     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2216     PetscCallExternal(hypre_ParCSRMatrixCopy, acsr, bcsr, 1);
2217     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2218     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2219     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2220   } else {
2221     PetscCall(MatCopy_Basic(A, B, str));
2222   }
2223   PetscFunctionReturn(PETSC_SUCCESS);
2224 }
2225 
/* Extract the diagonal of A into d, honoring the memory location (host or
   device) of hypre's diagonal CSR block. Requires congruent row/column layouts. */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  PetscBool           cong;

  PetscFunctionBegin;
  PetscCall(MatHasCongruentLayouts(A, &cong));
  PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
#if defined(PETSC_HAVE_HYPRE_DEVICE)
    HYPRE_MemoryLocation mem = hypre_CSRMatrixMemoryLocation(dmat);
#else
    HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
#endif

    /* pick the vector accessor matching where the hypre data lives */
    if (mem != HYPRE_MEMORY_HOST) PetscCall(VecGetArrayWriteAndMemType(d, (PetscScalar **)&a, NULL));
    else PetscCall(VecGetArrayWrite(d, (PetscScalar **)&a));
    hypre_CSRMatrixExtractDiagonal(dmat, a, 0); /* last arg 0: plain diagonal -- NOTE(review): confirm flag semantics against hypre docs */
    if (mem != HYPRE_MEMORY_HOST) PetscCall(VecRestoreArrayWriteAndMemType(d, (PetscScalar **)&a));
    else PetscCall(VecRestoreArrayWrite(d, (PetscScalar **)&a));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2253 
2254 #include <petscblaslapack.h>
2255 
2256 static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2257 {
2258   PetscFunctionBegin;
2259 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2260   {
2261     Mat                 B;
2262     hypre_ParCSRMatrix *x, *y, *z;
2263 
2264     PetscCall(MatHYPREGetParCSR(Y, &y));
2265     PetscCall(MatHYPREGetParCSR(X, &x));
2266     PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, 1.0, x, &z);
2267     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2268     PetscCall(MatHeaderMerge(Y, &B));
2269   }
2270 #else
2271   if (str == SAME_NONZERO_PATTERN) {
2272     hypre_ParCSRMatrix *x, *y;
2273     hypre_CSRMatrix    *xloc, *yloc;
2274     PetscInt            xnnz, ynnz;
2275     HYPRE_Complex      *xarr, *yarr;
2276     PetscBLASInt        one = 1, bnz;
2277 
2278     PetscCall(MatHYPREGetParCSR(Y, &y));
2279     PetscCall(MatHYPREGetParCSR(X, &x));
2280 
2281     /* diagonal block */
2282     xloc = hypre_ParCSRMatrixDiag(x);
2283     yloc = hypre_ParCSRMatrixDiag(y);
2284     xnnz = 0;
2285     ynnz = 0;
2286     xarr = NULL;
2287     yarr = NULL;
2288     if (xloc) {
2289       xarr = hypre_CSRMatrixData(xloc);
2290       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2291     }
2292     if (yloc) {
2293       yarr = hypre_CSRMatrixData(yloc);
2294       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2295     }
2296     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2297     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2298     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2299 
2300     /* off-diagonal block */
2301     xloc = hypre_ParCSRMatrixOffd(x);
2302     yloc = hypre_ParCSRMatrixOffd(y);
2303     xnnz = 0;
2304     ynnz = 0;
2305     xarr = NULL;
2306     yarr = NULL;
2307     if (xloc) {
2308       xarr = hypre_CSRMatrixData(xloc);
2309       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2310     }
2311     if (yloc) {
2312       yarr = hypre_CSRMatrixData(yloc);
2313       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2314     }
2315     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2316     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2317     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2318   } else if (str == SUBSET_NONZERO_PATTERN) {
2319     PetscCall(MatAXPY_Basic(Y, a, X, str));
2320   } else {
2321     Mat B;
2322 
2323     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2324     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2325     PetscCall(MatHeaderReplace(Y, &B));
2326   }
2327 #endif
2328   PetscFunctionReturn(PETSC_SUCCESS);
2329 }
2330 
2331 static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
2332 {
2333   hypre_ParCSRMatrix *parcsr = NULL;
2334   PetscCopyMode       cpmode;
2335   Mat_HYPRE          *hA;
2336 
2337   PetscFunctionBegin;
2338   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2339   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2340     parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
2341     cpmode = PETSC_OWN_POINTER;
2342   } else {
2343     cpmode = PETSC_COPY_VALUES;
2344   }
2345   PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
2346   hA = (Mat_HYPRE *)A->data;
2347   if (hA->cooMat) {
2348     Mat_HYPRE *hB = (Mat_HYPRE *)((*B)->data);
2349     op            = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
2350     /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
2351     PetscCall(MatDuplicate(hA->cooMat, op, &hB->cooMat));
2352     PetscCall(MatHYPRE_AttachCOOMat(*B));
2353   }
2354   PetscFunctionReturn(PETSC_SUCCESS);
2355 }
2356 
/* Set up COO (coordinate-format) assembly for a MATHYPRE matrix.

   An internal AIJ "agent" matrix (hmat->cooMat) performs all the COO
   preprocessing; its sparsity pattern is then copied into the hypre IJMatrix
   and its value array is shared with the hypre matrix (via
   MatHYPRE_AttachCOOMat).

   Input Parameters:
.  mat   - the MATHYPRE matrix
.  coo_n - number of COO entries
.  coo_i - row indices of the COO entries
.  coo_j - column indices of the COO entries

   NOTE(review): the call order below is significant — the IJ pattern must be
   copied and the matrix assembled before cooMat is attached. */
static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
{
  Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;

  PetscFunctionBegin;
  /* Build an agent matrix cooMat with AIJ format
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(MatHYPRE_CreateCOOMat(mat));
  /* Propagate mat's stash-related options to the agent matrix */
  PetscCall(MatSetOption(hmat->cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
  PetscCall(MatSetOption(hmat->cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));

  /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ uses this specific
     name to automatically put the diagonal entries first */
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(MatSetPreallocationCOO(hmat->cooMat, coo_n, coo_i, coo_j));
  /* Mark assembled: the agent matrix's pattern is final after preallocation */
  hmat->cooMat->assembled = PETSC_TRUE;

  /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
  PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
  PetscCall(MatHYPRE_CreateFromMat(hmat->cooMat, hmat));      /* Create hmat->ij and preallocate it */
  PetscCall(MatHYPRE_IJMatrixCopyIJ(hmat->cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */

  mat->preallocated = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */

  /* Attach cooMat to mat */
  PetscCall(MatHYPRE_AttachCOOMat(mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2388 
2389 static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
2390 {
2391   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2392 
2393   PetscFunctionBegin;
2394   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
2395   PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
2396   PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
2397   PetscFunctionReturn(PETSC_SUCCESS);
2398 }
2399 
2400 /*MC
2401    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2402           based on the hypre IJ interface.
2403 
2404    Level: intermediate
2405 
.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation()`
2407 M*/
2408 
/* Constructor for MATHYPRE: allocates the Mat_HYPRE implementation struct,
   installs the type's method table, and composes the type-specific functions
   (conversions, products, preallocation, COO assembly). */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE *hB;

  PetscFunctionBegin;
  PetscCall(PetscNew(&hB));

  /* NOTE(review): by their names these flags mark the internal hypre objects
     and arrays as owned by this wrapper (freed on destroy) — confirm against
     MatDestroy_HYPRE */
  hB->inner_free      = PETSC_TRUE;
  hB->array_available = PETSC_TRUE;

  B->data = (void *)hB;

  /* Install the MATHYPRE implementations of the Mat operations */
  PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* With device-enabled hypre the matrix starts on the device */
  B->ops->bindtocpu = MatBindToCPU_HYPRE;
  B->boundtocpu     = PETSC_FALSE;
#endif

  /* build cache for off array entries formed */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

  /* Duplicate of the object's communicator, kept for the hypre calls */
  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
  /* Compose type-specific functions so generic dispatchers (MatConvert,
     MatProductSetFromOptions, MatSetPreallocationCOO, ...) can find them */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  #if defined(HYPRE_USING_HIP)
  /* Register device-aware products and set matching vector type (HIP) */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  PetscCall(MatSetVecType(B, VECHIP));
  #endif
  #if defined(HYPRE_USING_CUDA)
  /* Register device-aware products and set matching vector type (CUDA) */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  PetscCall(MatSetVecType(B, VECCUDA));
  #endif
#endif
  PetscHYPREInitialize();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2481