xref: /petsc/src/mat/impls/hypre/mhypre.c (revision badd099fb2ece77d080fc02aefe95d4a02e75697)
1 /*
2     Creates a hypre IJMatrix from a PETSc matrix
3 */
4 
5 #include <petscpkg_version.h>
6 #include <petsc/private/petschypre.h>
7 #include <petscmathypre.h>
8 #include <petsc/private/matimpl.h>
9 #include <petsc/private/deviceimpl.h>
10 #include <../src/mat/impls/hypre/mhypre.h>
11 #include <../src/mat/impls/aij/mpi/mpiaij.h>
12 #include <../src/vec/vec/impls/hypre/vhyp.h>
13 #include <HYPRE.h>
14 #include <HYPRE_utilities.h>
15 #include <_hypre_parcsr_ls.h>
16 #include <_hypre_sstruct_ls.h>
17 
18 #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
19   #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
20 #endif
21 
22 #if PETSC_PKG_HYPRE_VERSION_GE(2, 15, 0)
23   #define HYPRE_AssumedPartitionCheck() 1
24 #endif
25 
26 static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
27 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
28 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
29 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
30 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
31 static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
32 
33 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
34 {
35   PetscInt        i, n_d, n_o;
36   const PetscInt *ia_d, *ia_o;
37   PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
38   HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;
39 
40   PetscFunctionBegin;
41   if (A_d) { /* determine number of nonzero entries in local diagonal part */
42     PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
43     if (done_d) {
44       PetscCall(PetscMalloc1(n_d, &nnz_d));
45       for (i = 0; i < n_d; i++) nnz_d[i] = ia_d[i + 1] - ia_d[i];
46     }
47     PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
48   }
49   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
50     PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
51     if (done_o) {
52       PetscCall(PetscMalloc1(n_o, &nnz_o));
53       for (i = 0; i < n_o; i++) nnz_o[i] = ia_o[i + 1] - ia_o[i];
54     }
55     PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
56   }
57   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
58     if (!done_o) { /* only diagonal part */
59       PetscCall(PetscCalloc1(n_d, &nnz_o));
60     }
61 #if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
62     { /* If we don't do this, the columns of the matrix will be all zeros! */
63       hypre_AuxParCSRMatrix *aux_matrix;
64       aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
65       hypre_AuxParCSRMatrixDestroy(aux_matrix);
66       hypre_IJMatrixTranslator(ij) = NULL;
67       PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
68       /* it seems they partially fixed it in 2.19.0 */
69   #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
70       aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
71       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
72   #endif
73     }
74 #else
75     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
76 #endif
77     PetscCall(PetscFree(nnz_d));
78     PetscCall(PetscFree(nnz_o));
79   }
80   PetscFunctionReturn(PETSC_SUCCESS);
81 }
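/* For reference, a minimal sketch (illustrative only, with hypothetical sizes) of the hypre
   preallocation call this helper wraps, for a 2-row local matrix with 3 and 1 diagonal
   nonzeros and no off-diagonal entries:

     HYPRE_Int nnz_d[2] = {3, 1}, nnz_o[2] = {0, 0};
     HYPRE_IJMatrixSetDiagOffdSizes(ij, nnz_d, nnz_o);

   The translator manipulation above works around the pre-2.19 behavior of this routine. */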
82 
83 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
84 {
85   PetscInt rstart, rend, cstart, cend;
86 
87   PetscFunctionBegin;
88   PetscCall(PetscLayoutSetUp(A->rmap));
89   PetscCall(PetscLayoutSetUp(A->cmap));
90   rstart = A->rmap->rstart;
91   rend   = A->rmap->rend;
92   cstart = A->cmap->rstart;
93   cend   = A->cmap->rend;
94   PetscHYPREInitialize();
95   if (hA->ij) {
96     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
97     PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
98   }
99   PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
100   PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
101   {
102     PetscBool       same;
103     Mat             A_d, A_o;
104     const PetscInt *colmap;
105     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
106     if (same) {
107       PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
108       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
109       PetscFunctionReturn(PETSC_SUCCESS);
110     }
111     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
112     if (same) {
113       PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
114       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
115       PetscFunctionReturn(PETSC_SUCCESS);
116     }
117     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
118     if (same) {
119       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
120       PetscFunctionReturn(PETSC_SUCCESS);
121     }
122     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
123     if (same) {
124       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
125       PetscFunctionReturn(PETSC_SUCCESS);
126     }
127   }
128   PetscFunctionReturn(PETSC_SUCCESS);
129 }
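/* Note on the rend - 1 and cend - 1 arguments above: hypre's IJ interface takes inclusive
   row/column bounds, unlike PETSc's half-open ownership ranges [rstart, rend). A host-only
   sketch creating an empty 4x4 sequential IJ matrix (illustrative only):

     HYPRE_IJMatrix ij;
     HYPRE_IJMatrixCreate(MPI_COMM_SELF, 0, 3, 0, 3, &ij);
     HYPRE_IJMatrixSetObjectType(ij, HYPRE_PARCSR);
*/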
130 
131 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
132 {
133   PetscBool flg;
134 
135   PetscFunctionBegin;
136 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
137   PetscCallExternal(HYPRE_IJMatrixInitialize, ij);
138 #else
139   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, ij, HYPRE_MEMORY_HOST);
140 #endif
141   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
142   if (flg) {
143     PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
144     PetscFunctionReturn(PETSC_SUCCESS);
145   }
146   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
147   if (flg) {
148     PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
149     PetscFunctionReturn(PETSC_SUCCESS);
150   }
151   PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
152   PetscFunctionReturn(PETSC_SUCCESS);
153 }
154 
155 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
156 {
157   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
158   HYPRE_Int              type;
159   hypre_ParCSRMatrix    *par_matrix;
160   hypre_AuxParCSRMatrix *aux_matrix;
161   hypre_CSRMatrix       *hdiag;
162   PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
163 
164   PetscFunctionBegin;
165   PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
166   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
167   PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
168   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
169   /*
170        this is the hack part, where we monkey directly with the hypre data structures
171   */
172   if (sameint) {
173     PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
174     PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
175   } else {
176     PetscInt i;
177 
178     for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
179     for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
180   }
181 
182   aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
183   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
184   PetscFunctionReturn(PETSC_SUCCESS);
185 }
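/* The sameint fast path above is taken when PetscInt and HYPRE_Int have the same width;
   otherwise (e.g. PETSc built with 64-bit indices against a 32-bit-integer hypre) each
   index is converted element by element. */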
186 
187 static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
188 {
189   Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
190   Mat_SeqAIJ            *pdiag, *poffd;
191   PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
192   HYPRE_Int             *hjj, type;
193   hypre_ParCSRMatrix    *par_matrix;
194   hypre_AuxParCSRMatrix *aux_matrix;
195   hypre_CSRMatrix       *hdiag, *hoffd;
196   PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
197 
198   PetscFunctionBegin;
199   pdiag = (Mat_SeqAIJ *)pA->A->data;
200   poffd = (Mat_SeqAIJ *)pA->B->data;
201   /* cstart is only valid for square MPIAIJ laid out in the usual way */
202   PetscCall(MatGetOwnershipRange(A, &cstart, NULL));
203 
204   PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
205   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
206   PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
207   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
208   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
209 
210   if (sameint) {
211     PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
212   } else {
213     for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
214   }
215 
216   hjj = hdiag->j;
217   pjj = pdiag->j;
218 #if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
219   for (i = 0; i < pdiag->nz; i++) hjj[i] = pjj[i];
220 #else
221   for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
222 #endif
223   if (sameint) {
224     PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
225   } else {
226     for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)poffd->i[i];
227   }
228 
229   jj = (PetscInt *)hoffd->j;
230 #if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
231   PetscCallExternal(hypre_CSRMatrixBigInitialize, hoffd);
232   jj = (PetscInt *)hoffd->big_j;
233 #endif
234   pjj = poffd->j;
235   for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];
236 
237   aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
238   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
239   PetscFunctionReturn(PETSC_SUCCESS);
240 }
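/* Worked example of the garray mapping used above: if this process's off-diagonal block
   references global columns {2, 9}, then garray = {2, 9} and a compressed local column
   index pjj[i] expands to the global index garray[pjj[i]]. */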
241 
242 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
243 {
244   Mat_HYPRE             *mhA = (Mat_HYPRE *)A->data;
245   Mat                    lA;
246   ISLocalToGlobalMapping rl2g, cl2g;
247   IS                     is;
248   hypre_ParCSRMatrix    *hA;
249   hypre_CSRMatrix       *hdiag, *hoffd;
250   MPI_Comm               comm;
251   HYPRE_Complex         *hdd, *hod, *aa;
252   PetscScalar           *data;
253   HYPRE_BigInt          *col_map_offd;
254   HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
255   PetscInt              *ii, *jj, *iptr, *jptr;
256   PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
257   HYPRE_Int              type;
258   MatType                lmattype   = NULL;
259   PetscBool              freeparcsr = PETSC_FALSE;
260 
261   PetscFunctionBegin;
262   comm = PetscObjectComm((PetscObject)A);
263   PetscCallExternal(HYPRE_IJMatrixGetObjectType, mhA->ij, &type);
264   PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
265   PetscCallExternal(HYPRE_IJMatrixGetObject, mhA->ij, (void **)&hA);
266 #if defined(PETSC_HAVE_HYPRE_DEVICE)
267   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(mhA->ij)) {
268     /* Supported by copying back to the host and then copying to the GPU.
269        Kind of inefficient, but this is the best we can do for now */
270   #if defined(HYPRE_USING_HIP)
271     lmattype = MATSEQAIJHIPSPARSE;
272   #elif defined(HYPRE_USING_CUDA)
273     lmattype = MATSEQAIJCUSPARSE;
274   #endif
275     hA         = hypre_ParCSRMatrixClone_v2(hA, 1, HYPRE_MEMORY_HOST);
276     freeparcsr = PETSC_TRUE;
277   }
278 #endif
279   M     = hypre_ParCSRMatrixGlobalNumRows(hA);
280   N     = hypre_ParCSRMatrixGlobalNumCols(hA);
281   str   = hypre_ParCSRMatrixFirstRowIndex(hA);
282   stc   = hypre_ParCSRMatrixFirstColDiag(hA);
283   hdiag = hypre_ParCSRMatrixDiag(hA);
284   hoffd = hypre_ParCSRMatrixOffd(hA);
285   dr    = hypre_CSRMatrixNumRows(hdiag);
286   dc    = hypre_CSRMatrixNumCols(hdiag);
287   nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
288   hdi   = hypre_CSRMatrixI(hdiag);
289   hdj   = hypre_CSRMatrixJ(hdiag);
290   hdd   = hypre_CSRMatrixData(hdiag);
291   oc    = hypre_CSRMatrixNumCols(hoffd);
292   nnz += hypre_CSRMatrixNumNonzeros(hoffd);
293   hoi = hypre_CSRMatrixI(hoffd);
294   hoj = hypre_CSRMatrixJ(hoffd);
295   hod = hypre_CSRMatrixData(hoffd);
296   if (reuse != MAT_REUSE_MATRIX) {
297     PetscInt *aux;
298 
299     /* generate l2g maps for rows and cols */
300     PetscCall(ISCreateStride(comm, dr, str, 1, &is));
301     PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
302     PetscCall(ISDestroy(&is));
303     col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
304     PetscCall(PetscMalloc1(dc + oc, &aux));
305     for (i = 0; i < dc; i++) aux[i] = i + stc;
306     for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
307     PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
308     PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
309     PetscCall(ISDestroy(&is));
310     /* create MATIS object */
311     PetscCall(MatCreate(comm, B));
312     PetscCall(MatSetSizes(*B, dr, dc, M, N));
313     PetscCall(MatSetType(*B, MATIS));
314     PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
315     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
316     PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
317 
318     /* allocate CSR for local matrix */
319     PetscCall(PetscMalloc1(dr + 1, &iptr));
320     PetscCall(PetscMalloc1(nnz, &jptr));
321     PetscCall(PetscMalloc1(nnz, &data));
322   } else {
323     PetscInt  nr;
324     PetscBool done;
325     PetscCall(MatISGetLocalMat(*B, &lA));
326     PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
327     PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
328     PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
329     PetscCall(MatSeqAIJGetArrayWrite(lA, &data));
330   }
331   /* merge local matrices */
332   ii  = iptr;
333   jj  = jptr;
334   aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
335   *ii = *(hdi++) + *(hoi++);
336   for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
337     PetscScalar *aold = (PetscScalar *)aa;
338     PetscInt    *jold = jj, nc = jd + jo;
339     for (; jd < *hdi; jd++) {
340       *jj++ = *hdj++;
341       *aa++ = *hdd++;
342     }
343     for (; jo < *hoi; jo++) {
344       *jj++ = *hoj++ + dc;
345       *aa++ = *hod++;
346     }
347     *(++ii) = *(hdi++) + *(hoi++);
348     PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
349   }
350   for (; cum < dr; cum++) *(++ii) = nnz;
351   if (reuse != MAT_REUSE_MATRIX) {
352     Mat_SeqAIJ *a;
353 
354     PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
355     /* hack SeqAIJ */
356     a          = (Mat_SeqAIJ *)lA->data;
357     a->free_a  = PETSC_TRUE;
358     a->free_ij = PETSC_TRUE;
359     if (lmattype) PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
360     PetscCall(MatISSetLocalMat(*B, lA));
361     PetscCall(MatDestroy(&lA));
362   } else {
363     PetscCall(MatSeqAIJRestoreArrayWrite(lA, &data));
364   }
365   PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
366   PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
367   if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
368   if (freeparcsr) PetscCallExternal(hypre_ParCSRMatrixDestroy, hA);
369   PetscFunctionReturn(PETSC_SUCCESS);
370 }
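/* Typical use of this conversion (sketch): obtain a MATIS version of a MATHYPRE matrix,
   e.g. for preconditioners such as PCBDDC that operate on MATIS matrices:

     Mat B;
     PetscCall(MatConvert(A, MATIS, MAT_INITIAL_MATRIX, &B));
*/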
371 
372 static PetscErrorCode MatHYPRE_DestroyCOOMat(Mat mat)
373 {
374   Mat_HYPRE *hA = (Mat_HYPRE *)mat->data;
375 
376   PetscFunctionBegin;
377   if (hA->cooMat) { /* If cooMat is present we need to destroy the column indices */
378     PetscCall(MatDestroy(&hA->cooMat));
379     if (hA->cooMatAttached) {
380       hypre_CSRMatrix     *csr;
381       hypre_ParCSRMatrix  *parcsr;
382       HYPRE_MemoryLocation mem;
383 
384       PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
385       csr = hypre_ParCSRMatrixDiag(parcsr);
386       if (csr) {
387         mem = hypre_CSRMatrixMemoryLocation(csr);
388         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
389         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
390       }
391       csr = hypre_ParCSRMatrixOffd(parcsr);
392       if (csr) {
393         mem = hypre_CSRMatrixMemoryLocation(csr);
394         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
395         PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
396       }
397     }
398   }
399   hA->cooMatAttached = PETSC_FALSE;
400   PetscFunctionReturn(PETSC_SUCCESS);
401 }
402 
403 static PetscErrorCode MatHYPRE_CreateCOOMat(Mat mat)
404 {
405   MPI_Comm    comm;
406   PetscMPIInt size;
407   PetscLayout rmap, cmap;
408   Mat_HYPRE  *hmat    = (Mat_HYPRE *)mat->data;
409   MatType     matType = MATAIJ; /* default type of cooMat */
410 
411   PetscFunctionBegin;
412   /* Build a delegate matrix cooMat in AIJ format.
413      It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
414    */
415   PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
416   PetscCallMPI(MPI_Comm_size(comm, &size));
417   PetscCall(PetscLayoutSetUp(mat->rmap));
418   PetscCall(PetscLayoutSetUp(mat->cmap));
419   PetscCall(MatGetLayouts(mat, &rmap, &cmap));
420 
421 #if defined(PETSC_HAVE_HYPRE_DEVICE)
422   if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
423   #if defined(HYPRE_USING_HIP)
424     matType = MATAIJHIPSPARSE;
425   #elif defined(HYPRE_USING_CUDA)
426     matType = MATAIJCUSPARSE;
427   #elif defined(HYPRE_USING_SYCL) && defined(PETSC_HAVE_KOKKOS_KERNELS)
428     matType = MATAIJKOKKOS;
429   #else
430     SETERRQ(comm, PETSC_ERR_SUP, "No HYPRE device available. Suggest re-installing with Kokkos Kernels");
431   #endif
432   }
433 #endif
434 
435   /* Do COO preallocation through cooMat */
436   PetscCall(MatHYPRE_DestroyCOOMat(mat));
437   PetscCall(MatCreate(comm, &hmat->cooMat));
438   PetscCall(MatSetType(hmat->cooMat, matType));
439   PetscCall(MatSetLayouts(hmat->cooMat, rmap, cmap));
440 
441   /* allocate local matrices if needed */
442   PetscCall(MatMPIAIJSetPreallocation(hmat->cooMat, 0, NULL, 0, NULL));
443   PetscFunctionReturn(PETSC_SUCCESS);
444 }
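/* The delegate matrix is driven through PETSc's generic COO assembly API; a minimal
   standalone sketch of that API (illustrative only, unrelated to hypre):

     PetscInt    coo_i[] = {0, 1}, coo_j[] = {0, 1};
     PetscScalar v[]     = {2.0, 3.0};
     PetscCall(MatSetPreallocationCOO(A, 2, coo_i, coo_j));
     PetscCall(MatSetValuesCOO(A, v, INSERT_VALUES));
*/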
445 
446 /* Attach cooMat's data array to the hypre matrix.
447    When AIJCUPMSPARSE supports raw device pointers instead of THRUSTARRAY,
448    we should swap the arrays, i.e., attach the hypre matrix array to cooMat.
449    This is because hypre should be in charge of handling the memory;
450    cooMat is only a way to reuse the PETSc COO code.
451    Attaching the memory will then be done at MatSetValuesCOO() time, and it will
452    dynamically support the hypre matrix migrating to the host.
453 */
454 static PetscErrorCode MatHYPRE_AttachCOOMat(Mat mat)
455 {
456   Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
457   hypre_CSRMatrix     *diag, *offd;
458   hypre_ParCSRMatrix  *parCSR;
459   HYPRE_MemoryLocation hmem = HYPRE_MEMORY_HOST;
460   PetscMemType         pmem;
461   Mat                  A, B;
462   PetscScalar         *a;
463   PetscMPIInt          size;
464   MPI_Comm             comm;
465 
466   PetscFunctionBegin;
467   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
468   if (hmat->cooMatAttached) PetscFunctionReturn(PETSC_SUCCESS);
469   PetscCheck(hmat->cooMat->preallocated, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix is not preallocated");
470   PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
471   PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
472   PetscCallMPI(MPI_Comm_size(comm, &size));
473 
474   /* Alias cooMat's data array to IJMatrix's */
475   PetscCallExternal(HYPRE_IJMatrixGetObject, hmat->ij, (void **)&parCSR);
476   diag = hypre_ParCSRMatrixDiag(parCSR);
477   offd = hypre_ParCSRMatrixOffd(parCSR);
478 
479   A = (size == 1) ? hmat->cooMat : ((Mat_MPIAIJ *)hmat->cooMat->data)->A;
480   B = (size == 1) ? NULL : ((Mat_MPIAIJ *)hmat->cooMat->data)->B;
481 
482   PetscCall(PetscObjectSetName((PetscObject)A, "_internal_COO_mat_for_hypre"));
483   hmem = hypre_CSRMatrixMemoryLocation(diag);
484   PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &a, &pmem));
485   PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre memory types do not match");
486   PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hmem));
487   hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)a;
488   hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */
489 
490   if (B) {
491     hmem = hypre_CSRMatrixMemoryLocation(offd);
492     PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &a, &pmem));
493     PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre memory types do not match");
494     PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hmem));
495     hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)a;
496     hypre_CSRMatrixOwnsData(offd) = 0;
497   }
498   hmat->cooMatAttached = PETSC_TRUE;
499   PetscFunctionReturn(PETSC_SUCCESS);
500 }
501 
502 // Build COO's coordinate list i[], j[] based on CSR's i[], j[] arrays and the number of local rows 'n'
503 static PetscErrorCode CSRtoCOO_Private(PetscInt n, const PetscInt ii[], const PetscInt jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
504 {
505   PetscInt *cooi, *cooj;
506 
507   PetscFunctionBegin;
508   *ncoo = ii[n];
509   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
510   for (PetscInt i = 0; i < n; i++) {
511     for (PetscInt j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
512   }
513   PetscCall(PetscArraycpy(cooj, jj, *ncoo));
514   *coo_i = cooi;
515   *coo_j = cooj;
516   PetscFunctionReturn(PETSC_SUCCESS);
517 }
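/* Worked example: with n = 2, ii = {0, 2, 3} and jj = {0, 1, 1}, the loop above expands
   the row pointers so that ncoo = 3, coo_i = {0, 0, 1} and coo_j = {0, 1, 1}. */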
518 
519 // Similar to CSRtoCOO_Private, but the CSR's i[], j[] are of type HYPRE_Int
520 static PetscErrorCode CSRtoCOO_HYPRE_Int_Private(PetscInt n, const HYPRE_Int ii[], const HYPRE_Int jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
521 {
522   PetscInt *cooi, *cooj;
523 
524   PetscFunctionBegin;
525   *ncoo = ii[n];
526   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
527   for (PetscInt i = 0; i < n; i++) {
528     for (HYPRE_Int j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
529   }
530   for (PetscCount i = 0; i < *ncoo; i++) cooj[i] = jj[i];
531   *coo_i = cooi;
532   *coo_j = cooj;
533   PetscFunctionReturn(PETSC_SUCCESS);
534 }
535 
536 // Build a COO data structure for the seqaij matrix, as if the nonzeros are laid out in the same order as in the CSR
537 static PetscErrorCode MatSeqAIJGetCOO_Private(Mat A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
538 {
539   PetscInt        n;
540   const PetscInt *ii, *jj;
541   PetscBool       done;
542 
543   PetscFunctionBegin;
544   PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
545   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatGetRowIJ");
546   PetscCall(CSRtoCOO_Private(n, ii, jj, ncoo, coo_i, coo_j));
547   PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
548   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatRestoreRowIJ");
549   PetscFunctionReturn(PETSC_SUCCESS);
550 }
551 
552 // Build a COO data structure for the hypreCSRMatrix, as if the nonzeros are laid out in the same order as in the hypreCSRMatrix
553 static PetscErrorCode hypreCSRMatrixGetCOO_Private(hypre_CSRMatrix *A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
554 {
555   PetscInt             n = hypre_CSRMatrixNumRows(A);
556   HYPRE_Int           *ii, *jj;
557   HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
558 
559   PetscFunctionBegin;
560 #if defined(PETSC_HAVE_HYPRE_DEVICE)
561   mem = hypre_CSRMatrixMemoryLocation(A);
562   if (mem != HYPRE_MEMORY_HOST) {
563     PetscCount nnz = hypre_CSRMatrixNumNonzeros(A);
564     PetscCall(PetscMalloc2(n + 1, &ii, nnz, &jj));
565     hypre_TMemcpy(ii, hypre_CSRMatrixI(A), HYPRE_Int, n + 1, HYPRE_MEMORY_HOST, mem);
566     hypre_TMemcpy(jj, hypre_CSRMatrixJ(A), HYPRE_Int, nnz, HYPRE_MEMORY_HOST, mem);
567   } else {
568 #else
569   {
570 #endif
571     ii = hypre_CSRMatrixI(A);
572     jj = hypre_CSRMatrixJ(A);
573   }
574   PetscCall(CSRtoCOO_HYPRE_Int_Private(n, ii, jj, ncoo, coo_i, coo_j));
575   if (mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree2(ii, jj));
576   PetscFunctionReturn(PETSC_SUCCESS);
577 }
578 
579 static PetscErrorCode MatSetValuesCOOFromCSRMatrix_Private(Mat A, hypre_CSRMatrix *H)
580 {
581   PetscBool            iscpu = PETSC_TRUE;
582   PetscScalar         *a;
583   HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
584 
585   PetscFunctionBegin;
586 #if defined(PETSC_HAVE_HYPRE_DEVICE)
587   mem = hypre_CSRMatrixMemoryLocation(H);
588   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &iscpu));
589 #endif
590   if (iscpu && mem != HYPRE_MEMORY_HOST) {
591     PetscCount nnz = hypre_CSRMatrixNumNonzeros(H);
592     PetscCall(PetscMalloc1(nnz, &a));
593     hypre_TMemcpy(a, hypre_CSRMatrixData(H), PetscScalar, nnz, HYPRE_MEMORY_HOST, mem);
594   } else {
595     a = (PetscScalar *)hypre_CSRMatrixData(H);
596   }
597   PetscCall(MatSetValuesCOO(A, a, INSERT_VALUES));
598   if (iscpu && mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree(a));
599   PetscFunctionReturn(PETSC_SUCCESS);
600 }
601 
602 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
603 {
604   MPI_Comm     comm = PetscObjectComm((PetscObject)A);
605   Mat          M = NULL, dH = NULL, oH = NULL, dA = NULL, oA = NULL;
606   PetscBool    ismpiaij, issbaij, isbaij, boundtocpu = PETSC_TRUE;
607   Mat_HYPRE   *hA;
608   PetscMemType memtype = PETSC_MEMTYPE_HOST;
609 
610   PetscFunctionBegin;
611   if (PetscDefined(HAVE_HYPRE_DEVICE)) {
612     PetscCall(MatGetCurrentMemType(A, &memtype));
613     PetscHYPREInitialize();
614     boundtocpu = PetscMemTypeHost(memtype) ? PETSC_TRUE : PETSC_FALSE;
615     PetscCallExternal(HYPRE_SetMemoryLocation, boundtocpu ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
616   }
617 
618   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPIBAIJ, ""));
619   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
620   if (isbaij || issbaij) { /* handle BAIJ and SBAIJ */
621     PetscBool ismpi;
622     MatType   newtype;
623 
624     PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
625     newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
626     if (reuse == MAT_REUSE_MATRIX) {
627       PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
628       PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
629       PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
630     } else if (reuse == MAT_INITIAL_MATRIX) {
631       PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
632       PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
633     } else {
634       PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
635       PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
636     }
637 #if defined(PETSC_HAVE_DEVICE)
638     (*B)->boundtocpu = boundtocpu;
639 #endif
640     PetscFunctionReturn(PETSC_SUCCESS);
641   }
642 
643   dA = A;
644   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
645   if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));
646 
647   if (reuse != MAT_REUSE_MATRIX) {
648     PetscCount coo_n;
649     PetscInt  *coo_i, *coo_j;
650 
651     PetscCall(MatCreate(comm, &M));
652     PetscCall(MatSetType(M, MATHYPRE));
653     PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
654     PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
655     PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
656 
657     hA = (Mat_HYPRE *)M->data;
658     PetscCall(MatHYPRE_CreateFromMat(A, hA));
659     PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij));
660 
661     PetscCall(MatHYPRE_CreateCOOMat(M));
662 
663     dH = hA->cooMat;
664     PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
665     if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));
666 
667     PetscCall(PetscObjectSetName((PetscObject)dH, "_internal_COO_mat_for_hypre"));
668     PetscCall(MatSeqAIJGetCOO_Private(dA, &coo_n, &coo_i, &coo_j));
669     PetscCall(MatSetPreallocationCOO(dH, coo_n, coo_i, coo_j));
670     PetscCall(PetscFree2(coo_i, coo_j));
671     if (oH) {
672       PetscCall(PetscLayoutDestroy(&oH->cmap));
673       PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oH), oA->cmap->n, oA->cmap->n, 1, &oH->cmap));
674       PetscCall(MatSeqAIJGetCOO_Private(oA, &coo_n, &coo_i, &coo_j));
675       PetscCall(MatSetPreallocationCOO(oH, coo_n, coo_i, coo_j));
676       PetscCall(PetscFree2(coo_i, coo_j));
677     }
678     hA->cooMat->assembled = PETSC_TRUE;
679 
680     M->preallocated = PETSC_TRUE;
681     PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
682     PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));
683 
684     PetscCall(MatHYPRE_AttachCOOMat(M));
685     if (reuse == MAT_INITIAL_MATRIX) *B = M;
686   } else M = *B;
687 
688   hA = (Mat_HYPRE *)M->data;
689   PetscCheck(hA->cooMat, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
690 
691   dH = hA->cooMat;
692   PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
693   if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));
694 
695   PetscScalar *a;
696   PetscCall(MatSeqAIJGetCSRAndMemType(dA, NULL, NULL, &a, NULL));
697   PetscCall(MatSetValuesCOO(dH, a, INSERT_VALUES));
698   if (oH) {
699     PetscCall(MatSeqAIJGetCSRAndMemType(oA, NULL, NULL, &a, NULL));
700     PetscCall(MatSetValuesCOO(oH, a, INSERT_VALUES));
701   }
702 
703   if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
704 #if defined(PETSC_HAVE_DEVICE)
705   (*B)->boundtocpu = boundtocpu;
706 #endif
707   PetscFunctionReturn(PETSC_SUCCESS);
708 }
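/* Entry point from user code (sketch), where A is an (MPI)AIJ matrix:

     Mat H;
     PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &H));

   MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX are handled above as well. */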
709 
710 static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
711 {
712   Mat                 M, dA = NULL, oA = NULL;
713   hypre_ParCSRMatrix *parcsr;
714   hypre_CSRMatrix    *dH, *oH;
715   MPI_Comm            comm;
716   PetscBool           ismpiaij, isseqaij;
717 
718   PetscFunctionBegin;
719   comm = PetscObjectComm((PetscObject)A);
720   if (reuse == MAT_REUSE_MATRIX) {
721     PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
722     PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
723     PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ base types are supported");
724   }
725   PetscCall(MatHYPREGetParCSR(A, &parcsr));
726 #if defined(PETSC_HAVE_HYPRE_DEVICE)
727   if (HYPRE_MEMORY_DEVICE == hypre_ParCSRMatrixMemoryLocation(parcsr)) {
728     PetscBool isaij;
729 
730     PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
731     if (isaij) {
732       PetscMPIInt size;
733 
734       PetscCallMPI(MPI_Comm_size(comm, &size));
735   #if defined(HYPRE_USING_HIP)
736       mtype = size > 1 ? MATMPIAIJHIPSPARSE : MATSEQAIJHIPSPARSE;
737   #elif defined(HYPRE_USING_CUDA)
738       mtype = size > 1 ? MATMPIAIJCUSPARSE : MATSEQAIJCUSPARSE;
739   #else
740       mtype = size > 1 ? MATMPIAIJ : MATSEQAIJ;
741   #endif
742     }
743   }
744 #endif
745   dH = hypre_ParCSRMatrixDiag(parcsr);
746   oH = hypre_ParCSRMatrixOffd(parcsr);
747   if (reuse != MAT_REUSE_MATRIX) {
748     PetscCount coo_n;
749     PetscInt  *coo_i, *coo_j;
750 
751     PetscCall(MatCreate(comm, &M));
752     PetscCall(MatSetType(M, mtype));
753     PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
754     PetscCall(MatMPIAIJSetPreallocation(M, 0, NULL, 0, NULL));
755 
756     dA = M;
757     PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
758     if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
759 
760     PetscCall(hypreCSRMatrixGetCOO_Private(dH, &coo_n, &coo_i, &coo_j));
761     PetscCall(MatSetPreallocationCOO(dA, coo_n, coo_i, coo_j));
762     PetscCall(PetscFree2(coo_i, coo_j));
763     if (ismpiaij) {
764       HYPRE_Int nc = hypre_CSRMatrixNumCols(oH);
765 
766       PetscCall(PetscLayoutDestroy(&oA->cmap));
767       PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oA), nc, nc, 1, &oA->cmap));
768       PetscCall(hypreCSRMatrixGetCOO_Private(oH, &coo_n, &coo_i, &coo_j));
769       PetscCall(MatSetPreallocationCOO(oA, coo_n, coo_i, coo_j));
770       PetscCall(PetscFree2(coo_i, coo_j));
771 
772       /* garray */
773       Mat_MPIAIJ   *aij    = (Mat_MPIAIJ *)M->data;
774       HYPRE_BigInt *harray = hypre_ParCSRMatrixColMapOffd(parcsr);
775       PetscInt     *garray;
776 
777       PetscCall(PetscFree(aij->garray));
778       PetscCall(PetscMalloc1(nc, &garray));
779       for (HYPRE_Int i = 0; i < nc; i++) garray[i] = (PetscInt)harray[i];
780       aij->garray = garray;
781       PetscCall(MatSetUpMultiply_MPIAIJ(M));
782     }
783     if (reuse == MAT_INITIAL_MATRIX) *B = M;
784   } else M = *B;
785 
786   dA = M;
787   PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
788   if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
789   PetscCall(MatSetValuesCOOFromCSRMatrix_Private(dA, dH));
790   if (oA) PetscCall(MatSetValuesCOOFromCSRMatrix_Private(oA, oH));
791   M->assembled = PETSC_TRUE;
792   if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
793   PetscFunctionReturn(PETSC_SUCCESS);
794 }
795 
796 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
797 {
798   hypre_ParCSRMatrix *tA;
799   hypre_CSRMatrix    *hdiag, *hoffd;
800   Mat_SeqAIJ         *diag, *offd;
801   PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
802   MPI_Comm            comm = PetscObjectComm((PetscObject)A);
803   PetscBool           ismpiaij, isseqaij;
804   PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
805   HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
806   PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
807   PetscBool           iscuda, iship;
808 #if defined(PETSC_HAVE_DEVICE) && defined(PETSC_HAVE_HYPRE_DEVICE)
809   PetscBool boundtocpu = A->boundtocpu;
810 #else
811   PetscBool boundtocpu = PETSC_TRUE;
812 #endif
813 
814   PetscFunctionBegin;
815   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
816   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
817   PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
818   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
819   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
820   PetscHYPREInitialize();
821   if (ismpiaij) {
822     Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
823 
824     diag = (Mat_SeqAIJ *)a->A->data;
825     offd = (Mat_SeqAIJ *)a->B->data;
826     if (!boundtocpu && (iscuda || iship)) {
827 #if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
828       if (iscuda) {
829         sameint = PETSC_TRUE;
830         PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
831         PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
832       }
833 #endif
834 #if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
835       if (iship) {
836         sameint = PETSC_TRUE;
837         PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
838         PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
839       }
840 #endif
841     } else {
842       boundtocpu = PETSC_TRUE;
843       pdi        = diag->i;
844       pdj        = diag->j;
845       poi        = offd->i;
846       poj        = offd->j;
847       if (sameint) {
848         hdi = (HYPRE_Int *)pdi;
849         hdj = (HYPRE_Int *)pdj;
850         hoi = (HYPRE_Int *)poi;
851         hoj = (HYPRE_Int *)poj;
852       }
853     }
854     garray = a->garray;
855     noffd  = a->B->cmap->N;
856     dnnz   = diag->nz;
857     onnz   = offd->nz;
858   } else {
859     diag = (Mat_SeqAIJ *)A->data;
860     offd = NULL;
861     if (!boundtocpu && (iscuda || iship)) {
862 #if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
863       if (iscuda) {
864         sameint = PETSC_TRUE;
865         PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
866       }
867 #endif
868 #if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
869       if (iship) {
870         sameint = PETSC_TRUE;
871         PetscCall(MatSeqAIJHIPSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
872       }
873 #endif
874     } else {
875       boundtocpu = PETSC_TRUE;
876       pdi        = diag->i;
877       pdj        = diag->j;
878       if (sameint) {
879         hdi = (HYPRE_Int *)pdi;
880         hdj = (HYPRE_Int *)pdj;
881       }
882     }
883     garray = NULL;
884     noffd  = 0;
885     dnnz   = diag->nz;
886     onnz   = 0;
887   }
888 
889   /* create a temporary ParCSR */
890   if (HYPRE_AssumedPartitionCheck()) {
891     PetscMPIInt myid;
892 
893     PetscCallMPI(MPI_Comm_rank(comm, &myid));
894     row_starts = A->rmap->range + myid;
895     col_starts = A->cmap->range + myid;
896   } else {
897     row_starts = A->rmap->range;
898     col_starts = A->cmap->range;
899   }
900   tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, noffd, dnnz, onnz);
901 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
902   hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
903   hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
904 #endif
905 
906   /* set diagonal part */
907   hdiag = hypre_ParCSRMatrixDiag(tA);
908   if (!sameint) { /* malloc CSR pointers */
909     PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
910     for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)pdi[i];
911     for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)pdj[i];
912   }
913   hypre_CSRMatrixI(hdiag)           = hdi;
914   hypre_CSRMatrixJ(hdiag)           = hdj;
915   hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a;
916   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
917   hypre_CSRMatrixSetDataOwner(hdiag, 0);
918 
919   /* set off-diagonal part */
920   hoffd = hypre_ParCSRMatrixOffd(tA);
921   if (offd) {
922     if (!sameint) { /* malloc CSR pointers */
923       PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
924       for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)poi[i];
925       for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)poj[i];
926     }
927     hypre_CSRMatrixI(hoffd)           = hoi;
928     hypre_CSRMatrixJ(hoffd)           = hoj;
929     hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
930     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
931     hypre_CSRMatrixSetDataOwner(hoffd, 0);
932   }
933 #if defined(PETSC_HAVE_HYPRE_DEVICE)
934   PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, !boundtocpu ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
935 #else
936   #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
937   PetscCallExternal(hypre_ParCSRMatrixInitialize, tA);
938   #else
939   PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, HYPRE_MEMORY_HOST);
940   #endif
941 #endif
942 
943   /* MatrixSetRownnz comes after MatrixInitialize, so the former uses the right memory location */
944   hypre_CSRMatrixSetRownnz(hdiag);
945   if (offd) hypre_CSRMatrixSetRownnz(hoffd);
946 
947   hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
948   hypre_ParCSRMatrixSetNumNonzeros(tA);
949   hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
950   if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallExternal(hypre_MatvecCommPkgCreate, tA);
951   *hA = tA;
952   PetscFunctionReturn(PETSC_SUCCESS);
953 }
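/* Usage pattern for this helper and MatAIJRestoreParCSR_Private() below (sketch):

     hypre_ParCSRMatrix *h;
     PetscCall(MatAIJGetParCSR_Private(A, &h));
     ... call hypre kernels on h ...
     PetscCall(MatAIJRestoreParCSR_Private(A, &h));

   The wrapper borrows the AIJ arrays without copying, so A must not be modified in between. */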
954 
955 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
956 {
957   hypre_CSRMatrix *hdiag, *hoffd;
958   PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
959   PetscBool        iscuda, iship;
960 
961   PetscFunctionBegin;
962   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
963   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
964   PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
965 #if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
966   if (iscuda) sameint = PETSC_TRUE;
967 #elif defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
968   if (iship) sameint = PETSC_TRUE;
969 #endif
970   hdiag = hypre_ParCSRMatrixDiag(*hA);
971   hoffd = hypre_ParCSRMatrixOffd(*hA);
972   /* free temporary memory allocated by PETSc
973      set pointers to NULL before destroying tA */
974   if (!sameint) {
975     HYPRE_Int *hi, *hj;
976 
977     hi = hypre_CSRMatrixI(hdiag);
978     hj = hypre_CSRMatrixJ(hdiag);
979     PetscCall(PetscFree2(hi, hj));
980     if (ismpiaij) {
981       hi = hypre_CSRMatrixI(hoffd);
982       hj = hypre_CSRMatrixJ(hoffd);
983       PetscCall(PetscFree2(hi, hj));
984     }
985   }
986   hypre_CSRMatrixI(hdiag)    = NULL;
987   hypre_CSRMatrixJ(hdiag)    = NULL;
988   hypre_CSRMatrixData(hdiag) = NULL;
989   if (ismpiaij) {
990     hypre_CSRMatrixI(hoffd)    = NULL;
991     hypre_CSRMatrixJ(hoffd)    = NULL;
992     hypre_CSRMatrixData(hoffd) = NULL;
993   }
994   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
995   hypre_ParCSRMatrixDestroy(*hA);
996   *hA = NULL;
997   PetscFunctionReturn(PETSC_SUCCESS);
998 }
999 
1000 /* calls RAP from BoomerAMG:
1001    the resulting ParCSR will not own the column and row starts
1002    It looks like we don't need to have the diagonal entries ordered first */
1003 static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
1004 {
1005 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
1006   HYPRE_Int P_owns_col_starts, R_owns_row_starts;
1007 #endif
1008 
1009   PetscFunctionBegin;
1010 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
1011   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
1012   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
1013 #endif
1014   /* can be replaced by a version test later */
1015 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1016   PetscStackPushExternal("hypre_ParCSRMatrixRAP");
1017   *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
1018   PetscStackPop;
1019 #else
1020   PetscCallExternal(hypre_BoomerAMGBuildCoarseOperator, hR, hA, hP, hRAP);
1021   PetscCallExternal(hypre_ParCSRMatrixSetNumNonzeros, *hRAP);
1022 #endif
1023   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
1024 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
1025   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
1026   hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
1027   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
1028   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
1029 #endif
1030   PetscFunctionReturn(PETSC_SUCCESS);
1031 }
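/* As used throughout this file, the first argument is applied transposed: passing
   (hP, hA, hP) yields P^T A P (PtAP), and (hA, hB, hC) yields A^T B C. */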
1032 
1033 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
1034 {
1035   Mat                 B;
1036   hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
1037   Mat_Product        *product = C->product;
1038 
1039   PetscFunctionBegin;
1040   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1041   PetscCall(MatAIJGetParCSR_Private(P, &hP));
1042   PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
1043   PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));
1044 
1045   PetscCall(MatHeaderMerge(C, &B));
1046   C->product = product;
1047 
1048   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1049   PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
1050   PetscFunctionReturn(PETSC_SUCCESS);
1051 }
1052 
1053 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
1054 {
1055   PetscFunctionBegin;
1056   PetscCall(MatSetType(C, MATAIJ));
1057   C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
1058   C->ops->productnumeric = MatProductNumeric_PtAP;
1059   PetscFunctionReturn(PETSC_SUCCESS);
1060 }
1061 
1062 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
1063 {
1064   Mat                 B;
1065   Mat_HYPRE          *hP;
1066   hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
1067   HYPRE_Int           type;
1068   MPI_Comm            comm = PetscObjectComm((PetscObject)A);
1069   PetscBool           ishypre;
1070 
1071   PetscFunctionBegin;
1072   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
1073   PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
1074   hP = (Mat_HYPRE *)P->data;
1075   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
1076   PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1077   PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
1078 
1079   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1080   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
1081   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1082 
1083   /* create temporary matrix and merge to C */
1084   PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
1085   PetscCall(MatHeaderMerge(C, &B));
1086   PetscFunctionReturn(PETSC_SUCCESS);
1087 }
1088 
1089 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
1090 {
1091   Mat                 B;
1092   hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
1093   Mat_HYPRE          *hA, *hP;
1094   PetscBool           ishypre;
1095   HYPRE_Int           type;
1096 
1097   PetscFunctionBegin;
1098   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
1099   PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
1100   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1101   PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
1102   hA = (Mat_HYPRE *)A->data;
1103   hP = (Mat_HYPRE *)P->data;
1104   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1105   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1106   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
1107   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1108   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
1109   PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
1110   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
1111   PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
1112   PetscCall(MatHeaderMerge(C, &B));
1113   PetscFunctionReturn(PETSC_SUCCESS);
1114 }
1115 
1116 /* calls hypre_ParMatmul
1117    hypre_ParMatmul uses hypre_ParCSRMatrixCreate with the communicator of hA;
1118    hypre_ParCSRMatrixCreate does not duplicate the communicator.
1119    It looks like we don't need to have the diagonal entries ordered first */
1120 static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
1121 {
1122   PetscFunctionBegin;
1123   /* can be replaced by a version test later */
1124 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1125   PetscStackPushExternal("hypre_ParCSRMatMat");
1126   *hAB = hypre_ParCSRMatMat(hA, hB);
1127 #else
1128   PetscStackPushExternal("hypre_ParMatmul");
1129   *hAB = hypre_ParMatmul(hA, hB);
1130 #endif
1131   PetscStackPop;
1132   PetscFunctionReturn(PETSC_SUCCESS);
1133 }
1134 
1135 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
1136 {
1137   Mat                 D;
1138   hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
1139   Mat_Product        *product = C->product;
1140 
1141   PetscFunctionBegin;
1142   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1143   PetscCall(MatAIJGetParCSR_Private(B, &hB));
1144   PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
1145   PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));
1146 
1147   PetscCall(MatHeaderMerge(C, &D));
1148   C->product = product;
1149 
1150   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1151   PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
1152   PetscFunctionReturn(PETSC_SUCCESS);
1153 }
1154 
1155 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
1156 {
1157   PetscFunctionBegin;
1158   PetscCall(MatSetType(C, MATAIJ));
1159   C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
1160   C->ops->productnumeric = MatProductNumeric_AB;
1161   PetscFunctionReturn(PETSC_SUCCESS);
1162 }
1163 
1164 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
1165 {
1166   Mat                 D;
1167   hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
1168   Mat_HYPRE          *hA, *hB;
1169   PetscBool           ishypre;
1170   HYPRE_Int           type;
1171   Mat_Product        *product;
1172 
1173   PetscFunctionBegin;
1174   PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
1175   PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
1176   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1177   PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
1178   hA = (Mat_HYPRE *)A->data;
1179   hB = (Mat_HYPRE *)B->data;
1180   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1181   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1182   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hB->ij, &type);
1183   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1184   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
1185   PetscCallExternal(HYPRE_IJMatrixGetObject, hB->ij, (void **)&Bparcsr);
1186   PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
1187   PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));
1188 
1189   /* need to use HeaderReplace because HeaderMerge messes up the communicator */
1190   product    = C->product; /* save it from MatHeaderReplace() */
1191   C->product = NULL;
1192   PetscCall(MatHeaderReplace(C, &D));
1193   C->product             = product;
1194   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
1195   C->ops->productnumeric = MatProductNumeric_AB;
1196   PetscFunctionReturn(PETSC_SUCCESS);
1197 }
1198 
1199 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
1200 {
1201   Mat                 E;
1202   hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;
1203 
1204   PetscFunctionBegin;
1205   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1206   PetscCall(MatAIJGetParCSR_Private(B, &hB));
1207   PetscCall(MatAIJGetParCSR_Private(C, &hC));
1208   PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
1209   PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
1210   PetscCall(MatHeaderMerge(D, &E));
1211   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1212   PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
1213   PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
1214   PetscFunctionReturn(PETSC_SUCCESS);
1215 }
1216 
1217 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
1218 {
1219   PetscFunctionBegin;
1220   PetscCall(MatSetType(D, MATAIJ));
1221   PetscFunctionReturn(PETSC_SUCCESS);
1222 }
1223 
1224 static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
1225 {
1226   PetscFunctionBegin;
1227   C->ops->productnumeric = MatProductNumeric_AB;
1228   PetscFunctionReturn(PETSC_SUCCESS);
1229 }
1230 
1231 static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1232 {
1233   Mat_Product *product = C->product;
1234   PetscBool    Ahypre;
1235 
1236   PetscFunctionBegin;
1237   PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
1238   if (Ahypre) { /* A is a Hypre matrix */
1239     PetscCall(MatSetType(C, MATHYPRE));
1240     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1241     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1242     PetscFunctionReturn(PETSC_SUCCESS);
1243   }
1244   PetscFunctionReturn(PETSC_SUCCESS);
1245 }
1246 
1247 static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
1248 {
1249   PetscFunctionBegin;
1250   C->ops->productnumeric = MatProductNumeric_PtAP;
1251   PetscFunctionReturn(PETSC_SUCCESS);
1252 }
1253 
1254 static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
1255 {
1256   Mat_Product *product = C->product;
1257   PetscBool    flg;
1258   PetscInt     type        = 0;
1259   const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
1260   PetscInt     ntype       = 4;
1261   Mat          A           = product->A;
1262   PetscBool    Ahypre;
1263 
1264   PetscFunctionBegin;
1265   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
1266   if (Ahypre) { /* A is a Hypre matrix */
1267     PetscCall(MatSetType(C, MATHYPRE));
1268     C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
1269     C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
1270     PetscFunctionReturn(PETSC_SUCCESS);
1271   }
1272 
1273   /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
1274   /* Get runtime option */
1275   if (product->api_user) {
1276     PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
1277     PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
1278     PetscOptionsEnd();
1279   } else {
1280     PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
1281     PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
1282     PetscOptionsEnd();
1283   }
1284 
1285   if (type == 0 || type == 1 || type == 2) {
1286     PetscCall(MatSetType(C, MATAIJ));
1287   } else if (type == 3) {
1288     PetscCall(MatSetType(C, MATHYPRE));
1289   } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
1290   C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
1291   C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
1292   PetscFunctionReturn(PETSC_SUCCESS);
1293 }
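/* Example runtime usage of the options handled above (sketch):

     -matptap_hypre_outtype hypre                  (MatPtAP API)
     -mat_product_algorithm_hypre_outtype hypre    (MatProduct API)
*/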
1294 
1295 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1296 {
1297   Mat_Product *product = C->product;
1298 
1299   PetscFunctionBegin;
1300   switch (product->type) {
1301   case MATPRODUCT_AB:
1302     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1303     break;
1304   case MATPRODUCT_PtAP:
1305     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1306     break;
1307   default:
1308     break;
1309   }
1310   PetscFunctionReturn(PETSC_SUCCESS);
1311 }
1312 
1313 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1314 {
1315   PetscFunctionBegin;
1316   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
1317   PetscFunctionReturn(PETSC_SUCCESS);
1318 }
1319 
1320 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1321 {
1322   PetscFunctionBegin;
1323   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
1324   PetscFunctionReturn(PETSC_SUCCESS);
1325 }
1326 
1327 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1328 {
1329   PetscFunctionBegin;
1330   if (y != z) PetscCall(VecCopy(y, z));
1331   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
1332   PetscFunctionReturn(PETSC_SUCCESS);
1333 }
1334 
1335 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1336 {
1337   PetscFunctionBegin;
1338   if (y != z) PetscCall(VecCopy(y, z));
1339   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
1340   PetscFunctionReturn(PETSC_SUCCESS);
1341 }
1342 
1343 /* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
1344 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
1345 {
1346   Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
1347   hypre_ParCSRMatrix *parcsr;
1348   hypre_ParVector    *hx, *hy;
1349 
1350   PetscFunctionBegin;
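  /* hA->x wraps a PETSc vector in the column space of A and hA->b one in the row space
     (see MatAssemblyEnd_HYPRE); for the transposed product their roles are swapped */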
1351   if (trans) {
1352     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
1353     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
1354     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
1355     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hx);
1356     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hy);
1357   } else {
1358     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
1359     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
1360     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
1361     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hx);
1362     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hy);
1363   }
1364   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1365   if (trans) {
1366     PetscCallExternal(hypre_ParCSRMatrixMatvecT, a, parcsr, hx, b, hy);
1367   } else {
1368     PetscCallExternal(hypre_ParCSRMatrixMatvec, a, parcsr, hx, b, hy);
1369   }
1370   PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
1371   PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
1372   PetscFunctionReturn(PETSC_SUCCESS);
1373 }
1374 
1375 static PetscErrorCode MatDestroy_HYPRE(Mat A)
1376 {
1377   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1378 
1379   PetscFunctionBegin;
1380   PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
1381   PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
1382   PetscCall(MatHYPRE_DestroyCOOMat(A)); /* must be called before destroying the individual CSR */
1383   if (hA->ij) {
1384     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
1385     PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
1386   }
1387   if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));
1388 
1389   PetscCall(MatStashDestroy_Private(&A->stash));
1390   PetscCall(PetscFree(hA->array));
1391   if (hA->rows_d) PetscStackCallExternalVoid("hypre_Free", hypre_Free(hA->rows_d, HYPRE_MEMORY_DEVICE));
1392 
1393   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
1394   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
1395   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
1396   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
1397   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", NULL));
1398   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", NULL));
1399   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_hypre_C", NULL));
1400   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", NULL));
1401   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
1402   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
1403   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
1404   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
1405   PetscCall(PetscFree(A->data));
1406   PetscFunctionReturn(PETSC_SUCCESS);
1407 }
1408 
1409 static PetscErrorCode MatSetUp_HYPRE(Mat A)
1410 {
1411   PetscFunctionBegin;
1412   if (!A->preallocated) PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
1413   PetscFunctionReturn(PETSC_SUCCESS);
1414 }
1415 
1416 //TODO FIX hypre_CSRMatrixMatvecOutOfPlace
1417 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1418 static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
1419 {
1420   Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
1421   HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;
1422 
1423   PetscFunctionBegin;
1424   A->boundtocpu = bind;
1425   if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
1426     hypre_ParCSRMatrix *parcsr;
1427     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1428     PetscCallExternal(hypre_ParCSRMatrixMigrate, parcsr, hmem);
1429   }
1430   if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
1431   if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
1432   PetscFunctionReturn(PETSC_SUCCESS);
1433 }
1434 #endif
1435 
1436 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1437 {
1438   Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
1439   PetscMPIInt  n;
1440   PetscInt     i, j, rstart, ncols, flg;
1441   PetscInt    *row, *col;
1442   PetscScalar *val;
1443 
1444   PetscFunctionBegin;
1445   PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
1446 
1447   if (!A->nooffprocentries) {
1448     while (1) {
1449       PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
1450       if (!flg) break;
1451 
1452       for (i = 0; i < n;) {
1453         /* Now identify the consecutive vals belonging to the same row */
1454         for (j = i, rstart = row[j]; j < n; j++) {
1455           if (row[j] != rstart) break;
1456         }
1457         if (j < n) ncols = j - i;
1458         else ncols = n - i;
1459         /* Now assemble all these values with a single function call */
1460         PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));
1461 
1462         i = j;
1463       }
1464     }
1465     PetscCall(MatStashScatterEnd_Private(&A->stash));
1466   }
1467 
1468   PetscCallExternal(HYPRE_IJMatrixAssemble, hA->ij);
1469   /* The assembly routine destroys the aux_matrix; we recreate it here by calling HYPRE_IJMatrixInitialize */
1470   /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
1471   if (!A->sortedfull) {
1472     hypre_AuxParCSRMatrix *aux_matrix;
1473 
1474     /* call destroy just to make sure we do not leak anything */
1475     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1476     PetscCallExternal(hypre_AuxParCSRMatrixDestroy, aux_matrix);
1477     hypre_IJMatrixTranslator(hA->ij) = NULL;
1478 
1479     /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
1480     PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1481     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1482     if (aux_matrix) {
1483       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
1484 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1485       PetscCallExternal(hypre_AuxParCSRMatrixInitialize, aux_matrix);
1486 #else
1487       PetscCallExternal(hypre_AuxParCSRMatrixInitialize_v2, aux_matrix, HYPRE_MEMORY_HOST);
1488 #endif
1489     }
1490   }
1491   {
1492     hypre_ParCSRMatrix *parcsr;
1493 
1494     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1495     if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallExternal(hypre_MatvecCommPkgCreate, parcsr);
1496   }
1497   if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
1498   if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
1499 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1500   PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
1501 #endif
1502   PetscFunctionReturn(PETSC_SUCCESS);
1503 }
1504 
1505 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1506 {
1507   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1508 
1509   PetscFunctionBegin;
1510   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");
1511 
1512   if (hA->array_size >= size) {
1513     *array = hA->array;
1514   } else {
1515     PetscCall(PetscFree(hA->array));
1516     hA->array_size = size;
1517     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1518     *array = hA->array;
1519   }
1520 
1521   hA->array_available = PETSC_FALSE;
1522   PetscFunctionReturn(PETSC_SUCCESS);
1523 }
1524 
1525 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1526 {
1527   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1528 
1529   PetscFunctionBegin;
1530   *array              = NULL;
1531   hA->array_available = PETSC_TRUE;
1532   PetscFunctionReturn(PETSC_SUCCESS);
1533 }
1534 
1535 static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1536 {
1537   Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
1538   PetscScalar   *vals = (PetscScalar *)v;
1539   HYPRE_Complex *sscr;
1540   PetscInt      *cscr[2];
1541   PetscInt       i, nzc;
1542   PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
1543   void          *array = NULL;
1544 
1545   PetscFunctionBegin;
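  /* single scratch buffer laid out as [ nc column indices | nc positions | nr*nc values ]:
     cscr[0] gets the compacted nonnegative column indices, cscr[1] their original
     positions in cols[], and sscr the values casted to HYPRE_Complex */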
1546   PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
1547   cscr[0] = (PetscInt *)array;
1548   cscr[1] = ((PetscInt *)array) + nc;
1549   sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
1550   for (i = 0, nzc = 0; i < nc; i++) {
1551     if (cols[i] >= 0) {
1552       cscr[0][nzc]   = cols[i];
1553       cscr[1][nzc++] = i;
1554     }
1555   }
1556   if (!nzc) {
1557     PetscCall(MatRestoreArray_HYPRE(A, &array));
1558     PetscFunctionReturn(PETSC_SUCCESS);
1559   }
1560 
1561 #if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
1562   if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
1563     hypre_ParCSRMatrix *parcsr;
1564 
1565     PetscCallExternal(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
1566     PetscCallExternal(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
1567   }
1568 #endif
1569 
1570   if (ins == ADD_VALUES) {
1571     for (i = 0; i < nr; i++) {
1572       if (rows[i] >= 0) {
1573         PetscInt  j;
1574         HYPRE_Int hnc = (HYPRE_Int)nzc;
1575 
1576         if (!nzc) continue;
1577         /* nonlocal values */
1578         if (rows[i] < rst || rows[i] >= ren) {
1579           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1580           if (hA->donotstash) continue;
1581         }
1582         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1583         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1584         PetscCallExternal(HYPRE_IJMatrixAddToValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1585       }
1586       vals += nc;
1587     }
1588   } else { /* INSERT_VALUES */
1589     for (i = 0; i < nr; i++) {
1590       if (rows[i] >= 0) {
1591         PetscInt  j;
1592         HYPRE_Int hnc = (HYPRE_Int)nzc;
1593 
1594         if (!nzc) continue;
1595         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1596         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1597         /* nonlocal values */
1598         if (rows[i] < rst || rows[i] >= ren) {
1599           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1600           if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
1601         }
1602         /* local values */
1603         else
1604           PetscCallExternal(HYPRE_IJMatrixSetValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1605       }
1606       vals += nc;
1607     }
1608   }
1609 
1610   PetscCall(MatRestoreArray_HYPRE(A, &array));
1611   PetscFunctionReturn(PETSC_SUCCESS);
1612 }
1613 
1614 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1615 {
1616   Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
1617   HYPRE_Int  *hdnnz, *honnz;
1618   PetscInt    i, rs, re, cs, ce, bs;
1619   PetscMPIInt size;
1620 
1621   PetscFunctionBegin;
1622   PetscCall(PetscLayoutSetUp(A->rmap));
1623   PetscCall(PetscLayoutSetUp(A->cmap));
1624   rs = A->rmap->rstart;
1625   re = A->rmap->rend;
1626   cs = A->cmap->rstart;
1627   ce = A->cmap->rend;
1628   if (!hA->ij) {
1629     PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rs, re - 1, cs, ce - 1, &hA->ij);
1630     PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1631   } else {
1632     HYPRE_BigInt hrs, hre, hcs, hce;
1633     PetscCallExternal(HYPRE_IJMatrixGetLocalRange, hA->ij, &hrs, &hre, &hcs, &hce);
1634     PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
1635     PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
1636   }
1637   PetscCall(MatHYPRE_DestroyCOOMat(A));
1638   PetscCall(MatGetBlockSize(A, &bs));
1639   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
1640   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;
1641 
1642   if (!dnnz) {
1643     PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
1644     for (i = 0; i < A->rmap->n; i++) hdnnz[i] = dnz;
1645   } else {
1646     hdnnz = (HYPRE_Int *)dnnz;
1647   }
1648   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1649   if (size > 1) {
1650     hypre_AuxParCSRMatrix *aux_matrix;
1651     if (!onnz) {
1652       PetscCall(PetscMalloc1(A->rmap->n, &honnz));
1653       for (i = 0; i < A->rmap->n; i++) honnz[i] = onz;
1654     } else honnz = (HYPRE_Int *)onnz;
1655     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since hypre
1656        seems to assume the user will input entire rows of values, properly sorted.
1657        In PETSc, we don't make such an assumption and set this flag to 1,
1658        unless the option MAT_SORTED_FULL is set to true.
1659        Also, to avoid possible memory leaks, we destroy and recreate the translator;
1660        this has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1661        the IJ matrix for us */
1662     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1663     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1664     hypre_IJMatrixTranslator(hA->ij) = NULL;
1665     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, hA->ij, hdnnz, honnz);
1666     aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1667     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
1668   } else {
1669     honnz = NULL;
1670     PetscCallExternal(HYPRE_IJMatrixSetRowSizes, hA->ij, hdnnz);
1671   }
1672 
1673   /* reset assembled flag and call the initialize method */
1674   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1675 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1676   PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1677 #else
1678   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, hA->ij, HYPRE_MEMORY_HOST);
1679 #endif
1680   if (!dnnz) PetscCall(PetscFree(hdnnz));
1681   if (!onnz && honnz) PetscCall(PetscFree(honnz));
1682   /* Match AIJ logic */
1683   A->preallocated = PETSC_TRUE;
1684   A->assembled    = PETSC_FALSE;
1685   PetscFunctionReturn(PETSC_SUCCESS);
1686 }
1687 
1688 /*@C
1689   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1690 
1691   Collective
1692 
1693   Input Parameters:
1694 + A    - the matrix
1695 . dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1696           (same value is used for all local rows)
1697 . dnnz - array containing the number of nonzeros in the various rows of the
1698           DIAGONAL portion of the local submatrix (possibly different for each row)
1699           or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
1700           The size of this array is equal to the number of local rows, i.e `m`.
1701           For matrices that will be factored, you must leave room for (and set)
1702           the diagonal entry even if it is zero.
1703 . onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1704           submatrix (same value is used for all local rows).
1705 - onnz - array containing the number of nonzeros in the various rows of the
1706           OFF-DIAGONAL portion of the local submatrix (possibly different for
1707           each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
1708           structure. The size of this array is equal to the number
1709           of local rows, i.e `m`.
1710 
1711   Level: intermediate
1712 
1713   Note:
1714   If `dnnz` (respectively `onnz`) is given, then `dnz` (respectively `onz`) is ignored; for sequential matrices, `onz` and `onnz` are ignored.
1715 
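  Example Usage:
  A minimal sketch, assuming a square matrix and illustrative per-row counts (5 nonzeros
  per row in the diagonal block, 2 in the off-diagonal block):
.vb
  Mat A;

  MatCreate(comm, &A);
  MatSetSizes(A, m, m, PETSC_DETERMINE, PETSC_DETERMINE);
  MatSetType(A, MATHYPRE);
  MatHYPRESetPreallocation(A, 5, NULL, 2, NULL);
  /* ... MatSetValues(), then MatAssemblyBegin()/MatAssemblyEnd() ... */
.ve
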
1716 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
1717 @*/
1718 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1719 {
1720   PetscFunctionBegin;
1721   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1722   PetscValidType(A, 1);
1723   PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
1724   PetscFunctionReturn(PETSC_SUCCESS);
1725 }
1726 
1727 /*@C
1728   MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`
1729 
1730   Collective
1731 
1732   Input Parameters:
1733 + parcsr   - the pointer to the `hypre_ParCSRMatrix`
1734 . mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
1735 - copymode - PETSc copying options, see  `PetscCopyMode`
1736 
1737   Output Parameter:
1738 . A - the matrix
1739 
1740   Level: intermediate
1741 
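  Example Usage:
  A sketch of wrapping an existing hypre matrix without copying it; `parcsr` is assumed
  to have been produced by a previous hypre call:
.vb
  hypre_ParCSRMatrix *parcsr; /* previously obtained from hypre */
  Mat                 A;

  MatCreateFromParCSR(parcsr, MATHYPRE, PETSC_USE_POINTER, &A);
  /* ... use A ... */
  MatDestroy(&A); /* with PETSC_USE_POINTER, parcsr is not freed here */
.ve
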
1742 .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1743 @*/
1744 PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
1745 {
1746   Mat        T;
1747   Mat_HYPRE *hA;
1748   MPI_Comm   comm;
1749   PetscInt   rstart, rend, cstart, cend, M, N;
1750   PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;
1751 
1752   PetscFunctionBegin;
1753   comm = hypre_ParCSRMatrixComm(parcsr);
1754   PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
1755   PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
1756   PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
1757   PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
1758   PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
1759   PetscCall(PetscStrcmp(mtype, MATIS, &isis));
1760   isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
1761   /* TODO */
1762   PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
1763   /* access ParCSRMatrix */
1764   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1765   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
1766   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1767   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
1768   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1769   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);
1770 
1771   /* create PETSc matrix with MatHYPRE */
1772   PetscCall(MatCreate(comm, &T));
1773   PetscCall(MatSetSizes(T, PetscMax(rend - rstart + 1, 0), PetscMax(cend - cstart + 1, 0), M, N));
1774   PetscCall(MatSetType(T, MATHYPRE));
1775   hA = (Mat_HYPRE *)T->data;
1776 
1777   /* create HYPRE_IJMatrix */
1778   PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend, cstart, cend, &hA->ij);
1779   PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1780 
1781   /* create new ParCSR object if needed */
1782   if (ishyp && copymode == PETSC_COPY_VALUES) {
1783     hypre_ParCSRMatrix *new_parcsr;
1784 #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
1785     hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;
1786 
1787     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
1788     hdiag      = hypre_ParCSRMatrixDiag(parcsr);
1789     hoffd      = hypre_ParCSRMatrixOffd(parcsr);
1790     ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
1791     noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
1792     PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
1793     PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
1794 #else
1795     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
1796 #endif
1797     parcsr   = new_parcsr;
1798     copymode = PETSC_OWN_POINTER;
1799   }
1800 
1801   /* set ParCSR object */
1802   hypre_IJMatrixObject(hA->ij) = parcsr;
1803   T->preallocated              = PETSC_TRUE;
1804 
1805   /* set assembled flag */
1806   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1807 #if 0
1808   PetscCallExternal(HYPRE_IJMatrixInitialize,hA->ij);
1809 #endif
1810   if (ishyp) {
1811     PetscMPIInt myid = 0;
1812 
1813     /* make sure we always have row_starts and col_starts available */
1814     if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
1815 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
1816     if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1817       PetscLayout map;
1818 
1819       PetscCall(MatGetLayouts(T, NULL, &map));
1820       PetscCall(PetscLayoutSetUp(map));
1821       hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1822     }
1823     if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1824       PetscLayout map;
1825 
1826       PetscCall(MatGetLayouts(T, &map, NULL));
1827       PetscCall(PetscLayoutSetUp(map));
1828       hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1829     }
1830 #endif
1831     /* prevent the ParCSR pointer from being freed */
1832     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1833     *A = T;
1834     PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
1835     PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
1836     PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
1837   } else if (isaij) {
1838     if (copymode != PETSC_OWN_POINTER) {
1839       /* prevent the ParCSR pointer from being freed */
1840       hA->inner_free = PETSC_FALSE;
1841       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
1842       PetscCall(MatDestroy(&T));
1843     } else { /* AIJ return type with PETSC_OWN_POINTER */
1844       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
1845       *A = T;
1846     }
1847   } else if (isis) {
1848     PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
1849     if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1850     PetscCall(MatDestroy(&T));
1851   }
1852   PetscFunctionReturn(PETSC_SUCCESS);
1853 }
1854 
1855 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1856 {
1857   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1858   HYPRE_Int  type;
1859 
1860   PetscFunctionBegin;
1861   PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
1862   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1863   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1864   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)parcsr);
1865   PetscFunctionReturn(PETSC_SUCCESS);
1866 }
1867 
1868 /*@C
1869   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1870 
1871   Not Collective, No Fortran Support
1872 
1873   Input Parameter:
1874 . A - the `MATHYPRE` object
1875 
1876   Output Parameter:
1877 . parcsr - the pointer to the `hypre_ParCSRMatrix`
1878 
1879   Level: intermediate
1880 
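  Example Usage:
  A sketch; the returned pointer references the internal hypre matrix, so it must not be
  destroyed by the caller:
.vb
  hypre_ParCSRMatrix *parcsr;

  MatHYPREGetParCSR(A, &parcsr);
  /* call hypre routines on parcsr here; do not destroy it */
.ve
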
1881 .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1882 @*/
1883 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1884 {
1885   PetscFunctionBegin;
1886   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1887   PetscValidType(A, 1);
1888   PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
1889   PetscFunctionReturn(PETSC_SUCCESS);
1890 }
1891 
1892 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1893 {
1894   hypre_ParCSRMatrix *parcsr;
1895   hypre_CSRMatrix    *ha;
1896   PetscInt            rst;
1897 
1898   PetscFunctionBegin;
1899   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented with non-square diagonal blocks");
1900   PetscCall(MatGetOwnershipRange(A, &rst, NULL));
1901   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1902   if (missing) *missing = PETSC_FALSE;
1903   if (dd) *dd = -1;
1904   ha = hypre_ParCSRMatrixDiag(parcsr);
1905   if (ha) {
1906     PetscInt   size, i;
1907     HYPRE_Int *ii, *jj;
1908 
1909     size = hypre_CSRMatrixNumRows(ha);
1910     ii   = hypre_CSRMatrixI(ha);
1911     jj   = hypre_CSRMatrixJ(ha);
1912     for (i = 0; i < size; i++) {
1913       PetscInt  j;
1914       PetscBool found = PETSC_FALSE;
1915 
1916       for (j = ii[i]; j < ii[i + 1] && !found; j++) found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1917 
1918       if (!found) {
1919         PetscCall(PetscInfo(A, "Matrix is missing local diagonal entry %" PetscInt_FMT "\n", i));
1920         if (missing) *missing = PETSC_TRUE;
1921         if (dd) *dd = i + rst;
1922         PetscFunctionReturn(PETSC_SUCCESS);
1923       }
1924     }
1925     if (!size) {
1926       PetscCall(PetscInfo(A, "Matrix has no diagonal entries, therefore it is missing the diagonal\n"));
1927       if (missing) *missing = PETSC_TRUE;
1928       if (dd) *dd = rst;
1929     }
1930   } else {
1931     PetscCall(PetscInfo(A, "Matrix has no diagonal entries, therefore it is missing the diagonal\n"));
1932     if (missing) *missing = PETSC_TRUE;
1933     if (dd) *dd = rst;
1934   }
1935   PetscFunctionReturn(PETSC_SUCCESS);
1936 }
1937 
1938 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1939 {
1940   hypre_ParCSRMatrix *parcsr;
1941 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1942   hypre_CSRMatrix *ha;
1943 #endif
1944   HYPRE_Complex hs;
1945 
1946   PetscFunctionBegin;
1947   PetscCall(PetscHYPREScalarCast(s, &hs));
1948   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1949 #if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
1950   PetscCallExternal(hypre_ParCSRMatrixScale, parcsr, hs);
1951 #else /* diagonal part */
1952   ha = hypre_ParCSRMatrixDiag(parcsr);
1953   if (ha) {
1954     PetscInt       size, i;
1955     HYPRE_Int     *ii;
1956     HYPRE_Complex *a;
1957 
1958     size = hypre_CSRMatrixNumRows(ha);
1959     a    = hypre_CSRMatrixData(ha);
1960     ii   = hypre_CSRMatrixI(ha);
1961     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1962   }
1963   /* off-diagonal part */
1964   ha = hypre_ParCSRMatrixOffd(parcsr);
1965   if (ha) {
1966     PetscInt       size, i;
1967     HYPRE_Int     *ii;
1968     HYPRE_Complex *a;
1969 
1970     size = hypre_CSRMatrixNumRows(ha);
1971     a    = hypre_CSRMatrixData(ha);
1972     ii   = hypre_CSRMatrixI(ha);
1973     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1974   }
1975 #endif
1976   PetscFunctionReturn(PETSC_SUCCESS);
1977 }
1978 
1979 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1980 {
1981   hypre_ParCSRMatrix *parcsr;
1982   HYPRE_Int          *lrows;
1983   PetscInt            rst, ren, i;
1984 
1985   PetscFunctionBegin;
1986   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1987   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1988   PetscCall(PetscMalloc1(numRows, &lrows));
1989   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1990   for (i = 0; i < numRows; i++) {
1991     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1992     lrows[i] = rows[i] - rst;
1993   }
1994   PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows);
1995   PetscCall(PetscFree(lrows));
1996   PetscFunctionReturn(PETSC_SUCCESS);
1997 }
1998 
1999 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
2000 {
2001   PetscFunctionBegin;
2002   if (ha) {
2003     HYPRE_Int     *ii, size;
2004     HYPRE_Complex *a;
2005 
2006     size = hypre_CSRMatrixNumRows(ha);
2007     a    = hypre_CSRMatrixData(ha);
2008     ii   = hypre_CSRMatrixI(ha);
2009 
2010     if (a) PetscCall(PetscArrayzero(a, ii[size]));
2011   }
2012   PetscFunctionReturn(PETSC_SUCCESS);
2013 }
2014 
2015 static PetscErrorCode MatZeroEntries_HYPRE(Mat A)
2016 {
2017   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2018 
2019   PetscFunctionBegin;
2020   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
2021     PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0);
2022   } else {
2023     hypre_ParCSRMatrix *parcsr;
2024 
2025     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2026     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
2027     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
2028   }
2029   PetscFunctionReturn(PETSC_SUCCESS);
2030 }
2031 
2032 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
2033 {
2034   PetscInt       ii;
2035   HYPRE_Int     *i, *j;
2036   HYPRE_Complex *a;
2037 
2038   PetscFunctionBegin;
2039   if (!hA) PetscFunctionReturn(PETSC_SUCCESS);
2040 
2041   i = hypre_CSRMatrixI(hA);
2042   j = hypre_CSRMatrixJ(hA);
2043   a = hypre_CSRMatrixData(hA);
2044 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2045   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hA)) {
2046   #if defined(HYPRE_USING_CUDA)
2047     MatZeroRows_CUDA(N, rows, i, j, a, diag);
2048   #elif defined(HYPRE_USING_HIP)
2049     MatZeroRows_HIP(N, rows, i, j, a, diag);
2050   #elif defined(PETSC_HAVE_KOKKOS)
2051     MatZeroRows_Kokkos(N, rows, i, j, a, diag);
2052   #else
2053     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for MatZeroRows on a hypre matrix in this memory location");
2054   #endif
2055   } else
2056 #endif
2057   {
2058     for (ii = 0; ii < N; ii++) {
2059       HYPRE_Int jj, ibeg, iend, irow;
2060 
2061       irow = rows[ii];
2062       ibeg = i[irow];
2063       iend = i[irow + 1];
2064       for (jj = ibeg; jj < iend; jj++)
2065         if (j[jj] == irow) a[jj] = diag;
2066         else a[jj] = 0.0;
2067     }
2068   }
2069   PetscFunctionReturn(PETSC_SUCCESS);
2070 }
2071 
2072 static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2073 {
2074   hypre_ParCSRMatrix *parcsr;
2075   PetscInt           *lrows, len, *lrows2;
2076   HYPRE_Complex       hdiag;
2077 
2078   PetscFunctionBegin;
2079   PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Modifying the solution and the right-hand side is not supported");
2080   PetscCall(PetscHYPREScalarCast(diag, &hdiag));
2081   /* retrieve the internal matrix */
2082   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2083   /* get locally owned rows */
2084   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));
2085 
2086 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2087   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hypre_ParCSRMatrixDiag(parcsr))) {
2088     Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2089     PetscInt   m;
2090     PetscCall(MatGetLocalSize(A, &m, NULL));
2091     if (!hA->rows_d) {
2092       hA->rows_d = hypre_TAlloc(PetscInt, m, HYPRE_MEMORY_DEVICE);
2093       if (m) PetscCheck(hA->rows_d, PETSC_COMM_SELF, PETSC_ERR_MEM, "HYPRE_TAlloc failed");
2094     }
2095     PetscCheck(len <= m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many rows in rows[]");
2096     PetscStackCallExternalVoid("hypre_Memcpy", hypre_Memcpy(hA->rows_d, lrows, sizeof(PetscInt) * len, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST));
2097     lrows2 = hA->rows_d;
2098   } else
2099 #endif
2100   {
2101     lrows2 = lrows;
2102   }
2103 
2104   /* zero diagonal part */
2105   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows2, hdiag));
2106   /* zero off-diagonal part */
2107   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows2, 0.0));
2108 
2109   PetscCall(PetscFree(lrows));
2110   PetscFunctionReturn(PETSC_SUCCESS);
2111 }
2112 
2113 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
2114 {
2115   PetscFunctionBegin;
2116   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
2117 
2118   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2119   PetscFunctionReturn(PETSC_SUCCESS);
2120 }
2121 
2122 static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2123 {
2124   hypre_ParCSRMatrix *parcsr;
2125   HYPRE_Int           hnz;
2126 
2127   PetscFunctionBegin;
2128   /* retrieve the internal matrix */
2129   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2130   /* call HYPRE API */
2131   PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2132   if (nz) *nz = (PetscInt)hnz;
2133   PetscFunctionReturn(PETSC_SUCCESS);
2134 }
2135 
2136 static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2137 {
2138   hypre_ParCSRMatrix *parcsr;
2139   HYPRE_Int           hnz;
2140 
2141   PetscFunctionBegin;
2142   /* retrieve the internal matrix */
2143   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2144   /* call HYPRE API */
2145   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2146   PetscCallExternal(HYPRE_ParCSRMatrixRestoreRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2147   PetscFunctionReturn(PETSC_SUCCESS);
2148 }
2149 
2150 static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2151 {
2152   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2153   PetscInt   i;
2154 
2155   PetscFunctionBegin;
2156   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2157   /* Ignore negative row indices; negative column indices
2158      should be ignored automatically by hypre */
2160   for (i = 0; i < m; i++) {
2161     if (idxm[i] >= 0) {
2162       HYPRE_Int hn = (HYPRE_Int)n;
2163       PetscCallExternal(HYPRE_IJMatrixGetValues, hA->ij, 1, &hn, (HYPRE_BigInt *)&idxm[i], (HYPRE_BigInt *)idxn, (HYPRE_Complex *)(v + i * n));
2164     }
2165   }
2166   PetscFunctionReturn(PETSC_SUCCESS);
2167 }
2168 
2169 static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2170 {
2171   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2172 
2173   PetscFunctionBegin;
2174   switch (op) {
2175   case MAT_NO_OFF_PROC_ENTRIES:
2176     if (flg) PetscCallExternal(HYPRE_IJMatrixSetMaxOffProcElmts, hA->ij, 0);
2177     break;
2178   case MAT_IGNORE_OFF_PROC_ENTRIES:
2179     hA->donotstash = flg;
2180     break;
2181   default:
2182     break;
2183   }
2184   PetscFunctionReturn(PETSC_SUCCESS);
2185 }
2186 
2187 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
2188 {
2189   PetscViewerFormat format;
2190 
2191   PetscFunctionBegin;
2192   PetscCall(PetscViewerGetFormat(view, &format));
2193   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
2194   if (format != PETSC_VIEWER_NATIVE) {
2195     Mat                 B;
2196     hypre_ParCSRMatrix *parcsr;
2197     PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;
2198 
2199     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2200     PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
2201     PetscCall(MatGetOperation(B, MATOP_VIEW, (PetscErrorCodeFn **)&mview));
2202     PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
2203     PetscCall((*mview)(B, view));
2204     PetscCall(MatDestroy(&B));
2205   } else {
2206     Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
2207     PetscMPIInt size;
2208     PetscBool   isascii;
2209     const char *filename;
2210 
2211     /* HYPRE uses only text files */
2212     PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
2213     PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
2214     PetscCall(PetscViewerFileGetName(view, &filename));
2215     PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename);
2216     PetscCallMPI(MPI_Comm_size(hA->comm, &size));
2217     if (size > 1) {
2218       PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
2219     } else {
2220       PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
2221     }
2222   }
2223   PetscFunctionReturn(PETSC_SUCCESS);
2224 }
2225 
2226 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2227 {
2228   hypre_ParCSRMatrix *acsr, *bcsr;
2229 
2230   PetscFunctionBegin;
2231   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2232     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2233     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2234     PetscCallExternal(hypre_ParCSRMatrixCopy, acsr, bcsr, 1);
2235     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2236     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2237     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2238   } else {
2239     PetscCall(MatCopy_Basic(A, B, str));
2240   }
2241   PetscFunctionReturn(PETSC_SUCCESS);
2242 }
2243 
2244 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
2245 {
2246   hypre_ParCSRMatrix *parcsr;
2247   hypre_CSRMatrix    *dmat;
2248   HYPRE_Complex      *a;
2249   PetscBool           cong;
2250 
2251   PetscFunctionBegin;
2252   PetscCall(MatHasCongruentLayouts(A, &cong));
2253   PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
2254   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2255   dmat = hypre_ParCSRMatrixDiag(parcsr);
2256   if (dmat) {
2257 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2258     HYPRE_MemoryLocation mem = hypre_CSRMatrixMemoryLocation(dmat);
2259 #else
2260     HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
2261 #endif
2262 
2263     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecGetArrayWriteAndMemType(d, (PetscScalar **)&a, NULL));
2264     else PetscCall(VecGetArrayWrite(d, (PetscScalar **)&a));
2265     hypre_CSRMatrixExtractDiagonal(dmat, a, 0);
2266     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecRestoreArrayWriteAndMemType(d, (PetscScalar **)&a));
2267     else PetscCall(VecRestoreArrayWrite(d, (PetscScalar **)&a));
2268   }
2269   PetscFunctionReturn(PETSC_SUCCESS);
2270 }
2271 
2272 #include <petscblaslapack.h>
2273 
2274 static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2275 {
2276   PetscFunctionBegin;
2277 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2278   {
2279     Mat                 B;
2280     hypre_ParCSRMatrix *x, *y, *z;
2281 
2282     PetscCall(MatHYPREGetParCSR(Y, &y));
2283     PetscCall(MatHYPREGetParCSR(X, &x));
2284     PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, 1.0, x, &z);
2285     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2286     PetscCall(MatHeaderMerge(Y, &B));
2287   }
2288 #else
2289   if (str == SAME_NONZERO_PATTERN) {
2290     hypre_ParCSRMatrix *x, *y;
2291     hypre_CSRMatrix    *xloc, *yloc;
2292     PetscInt            xnnz, ynnz;
2293     HYPRE_Complex      *xarr, *yarr;
2294     PetscBLASInt        one = 1, bnz;
2295 
2296     PetscCall(MatHYPREGetParCSR(Y, &y));
2297     PetscCall(MatHYPREGetParCSR(X, &x));
2298 
2299     /* diagonal block */
2300     xloc = hypre_ParCSRMatrixDiag(x);
2301     yloc = hypre_ParCSRMatrixDiag(y);
2302     xnnz = 0;
2303     ynnz = 0;
2304     xarr = NULL;
2305     yarr = NULL;
2306     if (xloc) {
2307       xarr = hypre_CSRMatrixData(xloc);
2308       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2309     }
2310     if (yloc) {
2311       yarr = hypre_CSRMatrixData(yloc);
2312       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2313     }
2314     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2315     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2316     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2317 
2318     /* off-diagonal block */
2319     xloc = hypre_ParCSRMatrixOffd(x);
2320     yloc = hypre_ParCSRMatrixOffd(y);
2321     xnnz = 0;
2322     ynnz = 0;
2323     xarr = NULL;
2324     yarr = NULL;
2325     if (xloc) {
2326       xarr = hypre_CSRMatrixData(xloc);
2327       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2328     }
2329     if (yloc) {
2330       yarr = hypre_CSRMatrixData(yloc);
2331       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2332     }
2333     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2334     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2335     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2336   } else if (str == SUBSET_NONZERO_PATTERN) {
2337     PetscCall(MatAXPY_Basic(Y, a, X, str));
2338   } else {
2339     Mat B;
2340 
2341     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2342     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2343     PetscCall(MatHeaderReplace(Y, &B));
2344   }
2345 #endif
2346   PetscFunctionReturn(PETSC_SUCCESS);
2347 }
2348 
2349 static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
2350 {
2351   hypre_ParCSRMatrix *parcsr = NULL;
2352   PetscCopyMode       cpmode;
2353   Mat_HYPRE          *hA;
2354 
2355   PetscFunctionBegin;
2356   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2357   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2358     parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
2359     cpmode = PETSC_OWN_POINTER;
2360   } else {
2361     cpmode = PETSC_COPY_VALUES;
2362   }
2363   PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
2364   hA = (Mat_HYPRE *)A->data;
2365   if (hA->cooMat) {
2366     Mat_HYPRE *hB = (Mat_HYPRE *)((*B)->data);
2367     op            = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
2368     /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
2369     PetscCall(MatDuplicate(hA->cooMat, op, &hB->cooMat));
2370     PetscCall(MatHYPRE_AttachCOOMat(*B));
2371   }
2372   PetscFunctionReturn(PETSC_SUCCESS);
2373 }
2374 
2375 static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
2376 {
2377   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2378 
2379   PetscFunctionBegin;
2380   /* Build a delegate matrix cooMat in AIJ format.
2381      It has the same sparsity pattern as mat and shares mat's data array; we use cooMat to do the COO work.
2382    */
2383   PetscCall(MatHYPRE_CreateCOOMat(mat));
2384   PetscCall(MatSetOption(hmat->cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
2385   PetscCall(MatSetOption(hmat->cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));
2386 
2387   /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ use this specific
2388      name to automatically put the diagonal entries first */
2389   PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
2390   PetscCall(MatSetPreallocationCOO(hmat->cooMat, coo_n, coo_i, coo_j));
2391   hmat->cooMat->assembled = PETSC_TRUE;
2392 
2393   /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
2394   PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
2395   PetscCall(MatHYPRE_CreateFromMat(hmat->cooMat, hmat));      /* Create hmat->ij and preallocate it */
2396   PetscCall(MatHYPRE_IJMatrixCopyIJ(hmat->cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */
2397 
2398   mat->preallocated = PETSC_TRUE;
2399   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
2400   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */
2401 
2402   /* Attach cooMat to mat */
2403   PetscCall(MatHYPRE_AttachCOOMat(mat));
2404   PetscFunctionReturn(PETSC_SUCCESS);
2405 }
2406 
2407 static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
2408 {
2409   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2410 
2411   PetscFunctionBegin;
2412   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
2413   PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
2414   PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
2415   PetscFunctionReturn(PETSC_SUCCESS);
2416 }
2417 
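/* Example of assembling a MATHYPRE matrix through the COO interface (a sketch; n,
   coo_i, coo_j and coo_v are illustrative):

     MatSetPreallocationCOO(A, n, coo_i, coo_j); // dispatches to MatSetPreallocationCOO_HYPRE
     MatSetValuesCOO(A, coo_v, INSERT_VALUES);   // dispatches to MatSetValuesCOO_HYPRE
*/
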
2418 static PetscErrorCode MatGetCurrentMemType_HYPRE(Mat A, PetscMemType *m)
2419 {
2420   PetscBool petsconcpu;
2421 
2422   PetscFunctionBegin;
2423   PetscCall(MatBoundToCPU(A, &petsconcpu));
2424   *m = petsconcpu ? PETSC_MEMTYPE_HOST : PETSC_MEMTYPE_DEVICE;
2425   PetscFunctionReturn(PETSC_SUCCESS);
2426 }
2427 
2428 /*MC
2429    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2430           based on the hypre IJ interface.
2431 
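   Options Database Key:
. -mat_type hypre - sets the matrix type to `MATHYPRE` during a call to `MatSetFromOptions()`

   Example Usage:
.vb
   MatCreate(comm, &A);
   MatSetSizes(A, m, n, M, N);
   MatSetType(A, MATHYPRE);
   MatHYPRESetPreallocation(A, dnz, NULL, onz, NULL);
.ve
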
2432    Level: intermediate
2433 
2434 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation`
2435 M*/
2436 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
2437 {
2438   Mat_HYPRE *hB;
2439 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2440   HYPRE_MemoryLocation memory_location;
2441 #endif
2442 
2443   PetscFunctionBegin;
2444   PetscHYPREInitialize();
2445   PetscCall(PetscNew(&hB));
2446 
2447   hB->inner_free      = PETSC_TRUE;
2448   hB->array_available = PETSC_TRUE;
2449 
2450   B->data = (void *)hB;
2451 
2452   PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
2453   B->ops->mult                  = MatMult_HYPRE;
2454   B->ops->multtranspose         = MatMultTranspose_HYPRE;
2455   B->ops->multadd               = MatMultAdd_HYPRE;
2456   B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
2457   B->ops->setup                 = MatSetUp_HYPRE;
2458   B->ops->destroy               = MatDestroy_HYPRE;
2459   B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
2460   B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
2461   B->ops->setvalues             = MatSetValues_HYPRE;
2462   B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
2463   B->ops->scale                 = MatScale_HYPRE;
2464   B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
2465   B->ops->zeroentries           = MatZeroEntries_HYPRE;
2466   B->ops->zerorows              = MatZeroRows_HYPRE;
2467   B->ops->getrow                = MatGetRow_HYPRE;
2468   B->ops->restorerow            = MatRestoreRow_HYPRE;
2469   B->ops->getvalues             = MatGetValues_HYPRE;
2470   B->ops->setoption             = MatSetOption_HYPRE;
2471   B->ops->duplicate             = MatDuplicate_HYPRE;
2472   B->ops->copy                  = MatCopy_HYPRE;
2473   B->ops->view                  = MatView_HYPRE;
2474   B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
2475   B->ops->axpy                  = MatAXPY_HYPRE;
2476   B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
2477   B->ops->getcurrentmemtype     = MatGetCurrentMemType_HYPRE;
2478 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2479   B->ops->bindtocpu = MatBindToCPU_HYPRE;
2480   /* Get hypre's default memory location. Users can control this using the corresponding HYPRE_SetMemoryLocation API */
2481   PetscCallExternal(HYPRE_GetMemoryLocation, &memory_location);
2482   B->boundtocpu = (memory_location == HYPRE_MEMORY_HOST) ? PETSC_TRUE : PETSC_FALSE;
2483 #endif
2484 
2485   /* build stash to cache off-process entries */
2486   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));
2487 
2488   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
2489   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
2490   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
2491   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
2492   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
2493   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
2494   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
2495   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
2496   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
2497   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
2498 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2499   #if defined(HYPRE_USING_HIP)
2500   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2501   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2502   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
2503   PetscCall(MatSetVecType(B, VECHIP));
2504   #endif
2505   #if defined(HYPRE_USING_CUDA)
2506   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2507   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2508   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
2509   PetscCall(MatSetVecType(B, VECCUDA));
2510   #endif
2511 #endif
2512   PetscFunctionReturn(PETSC_SUCCESS);
2513 }
2514