/*
    Creates hypre ijmatrix from PETSc matrix
*/
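
/* A minimal usage sketch of the conversion implemented in this file, assuming A is
   an assembled (MPI)AIJ matrix (illustration only, not part of this file's code):

     Mat H;
     PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &H));
     ...
     PetscCall(MatDestroy(&H));
*/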

#include <petscpkg_version.h>
#include <petsc/private/petschypre.h>
#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/deviceimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>
#include <_hypre_sstruct_ls.h>

#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
#endif

static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);

static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscInt        i, n_d, n_o;
  const PetscInt *ia_d, *ia_o;
  PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
  HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
    if (done_d) {
      PetscCall(PetscMalloc1(n_d, &nnz_d));
      for (i = 0; i < n_d; i++) nnz_d[i] = ia_d[i + 1] - ia_d[i];
    }
    PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
    if (done_o) {
      PetscCall(PetscMalloc1(n_o, &nnz_o));
      for (i = 0; i < n_o; i++) nnz_o[i] = ia_o[i + 1] - ia_o[i];
    }
    PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
  }
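  /* The per-row counts above are differences of consecutive CSR row offsets:
     e.g. ia = {0, 2, 3} yields row counts {2, 1} */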
  if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part */
      PetscCall(PetscCalloc1(n_d, &nnz_o));
    }
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
    { /* If we don't do this, the columns of the matrix will be all zeros! */
      hypre_AuxParCSRMatrix *aux_matrix;
      aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixDestroy(aux_matrix);
      hypre_IJMatrixTranslator(ij) = NULL;
      PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
      /* it seems they partially fixed it in 2.19.0 */
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  #endif
    }
#else
    PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
#endif
    PetscCall(PetscFree(nnz_d));
    PetscCall(PetscFree(nnz_o));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscInt rstart, rend, cstart, cend;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscHYPREInitialize();
  if (hA->ij) {
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
  }
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
  {
    PetscBool       same;
    Mat             A_d, A_o;
    const PetscInt *colmap;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
    if (same) {
      PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
    if (same) {
      PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

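/* Copy the nonzero structure (row and column indices, not the values) of A into the hypre IJ matrix */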
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscBool flg;

  PetscFunctionBegin;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  PetscCallExternal(HYPRE_IJMatrixInitialize, ij);
#else
  PetscCallExternal(HYPRE_IJMatrixInitialize_v2, ij, HYPRE_MEMORY_HOST);
#endif
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
  HYPRE_Int              type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
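  /* PetscInt and HYPRE_Int may differ in width (e.g. 64-bit PETSc indices with
     32-bit hypre); when the widths match we can memcpy, otherwise cast element-wise */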

  PetscFunctionBegin;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /* this is the hack part, where we monkey directly with the hypre data structures */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
    PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
  } else {
    PetscInt i;

    for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  for (i = 0; i < pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)poffd->i[i];
  }

  jj = (PetscInt *)hoffd->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  PetscCallExternal(hypre_CSRMatrixBigInitialize, hoffd);
  jj = (PetscInt *)hoffd->big_j;
#endif
  pjj = poffd->j;
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];
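  /* garray maps the compact local off-diagonal column indices of the MPIAIJ B block
     back to global column indices, which is what hypre stores in its offd part */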

  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)A->data;
  Mat                    lA;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;
  MatType                lmattype   = NULL;
  PetscBool              freeparcsr = PETSC_FALSE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, mhA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, mhA->ij, (void **)&hA);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(mhA->ij)) {
    /* Support by copying back on the host and copy to GPU
       Kind of inefficient, but this is the best we can do now */
  #if defined(HYPRE_USING_HIP)
    lmattype = MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    lmattype = MATSEQAIJCUSPARSE;
  #endif
    hA         = hypre_ParCSRMatrixClone_v2(hA, 1, HYPRE_MEMORY_HOST);
    freeparcsr = PETSC_TRUE;
  }
#endif
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscCall(PetscMalloc1(dc + oc, &aux));
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    PetscInt  nr;
    PetscBool done;
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArrayWrite(lA, &data));
  }
  /* merge local matrices */
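  /* Walk the diagonal and off-diagonal CSRs row by row, concatenating their column
     indices (off-diagonal columns are shifted by dc so they land after the diagonal
     block in the local numbering) and sorting each merged row by column index */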
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    PetscScalar *aold = (PetscScalar *)aa;
    PetscInt    *jold = jj, nc = jd + jo;
    for (; jd < *hdi; jd++) {
      *jj++ = *hdj++;
      *aa++ = *hdd++;
    }
    for (; jo < *hoi; jo++) {
      *jj++ = *hoj++ + dc;
      *aa++ = *hod++;
    }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
  }
  for (; cum < dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ *a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
    /* hack SeqAIJ */
    a          = (Mat_SeqAIJ *)lA->data;
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    if (lmattype) PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
    PetscCall(MatISSetLocalMat(*B, lA));
    PetscCall(MatDestroy(&lA));
  } else {
    PetscCall(MatSeqAIJRestoreArrayWrite(lA, &data));
  }
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
  if (freeparcsr) PetscCallExternal(hypre_ParCSRMatrixDestroy, hA);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_DestroyCOOMat(Mat mat)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)mat->data;

  PetscFunctionBegin;
  if (hA->cooMat) { /* If cooMat is present we need to destroy the column indices */
    PetscCall(MatDestroy(&hA->cooMat));
    if (hA->cooMatAttached) {
      hypre_CSRMatrix     *csr;
      hypre_ParCSRMatrix  *parcsr;
      HYPRE_MemoryLocation mem;

      PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
      csr = hypre_ParCSRMatrixDiag(parcsr);
      if (csr) {
        mem = hypre_CSRMatrixMemoryLocation(csr);
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
      }
      csr = hypre_ParCSRMatrixOffd(parcsr);
      if (csr) {
        mem = hypre_CSRMatrixMemoryLocation(csr);
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
      }
    }
  }
  hA->cooMatAttached = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_CreateCOOMat(Mat mat)
{
  MPI_Comm    comm;
  PetscMPIInt size;
  PetscLayout rmap, cmap;
  Mat_HYPRE  *hmat    = (Mat_HYPRE *)mat->data;
  MatType     matType = MATAIJ; /* default type of cooMat */

  PetscFunctionBegin;
  /* Build an agent matrix cooMat with AIJ format
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  PetscCall(MatGetLayouts(mat, &rmap, &cmap));

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
  #if defined(HYPRE_USING_HIP)
    matType = MATAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    matType = MATAIJCUSPARSE;
  #else
    SETERRQ(comm, PETSC_ERR_SUP, "Do not know the HYPRE device");
  #endif
  }
#endif

  /* Do COO preallocation through cooMat */
  PetscCall(MatHYPRE_DestroyCOOMat(mat));
  PetscCall(MatCreate(comm, &hmat->cooMat));
  PetscCall(MatSetType(hmat->cooMat, matType));
  PetscCall(MatSetLayouts(hmat->cooMat, rmap, cmap));

  /* allocate local matrices if needed */
  PetscCall(MatMPIAIJSetPreallocation(hmat->cooMat, 0, NULL, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Attach cooMat's data array to the hypre matrix.
   Once AIJCUPMSPARSE supports raw device pointers (rather than THRUSTARRAY),
   we should swap the arrays instead, i.e., attach the hypre matrix array to cooMat.
   This is because hypre should be in charge of handling the memory;
   cooMat is only a way to reuse the PETSc COO code.
   Attaching the memory would then be done at MatSetValuesCOO time, and it would
   dynamically support the hypre matrix migrating to the host.
*/
static PetscErrorCode MatHYPRE_AttachCOOMat(Mat mat)
{
  Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
  hypre_CSRMatrix     *diag, *offd;
  hypre_ParCSRMatrix  *parCSR;
  HYPRE_MemoryLocation hmem = HYPRE_MEMORY_HOST;
  PetscMemType         pmem;
  Mat                  A, B;
  PetscScalar         *a;
  PetscMPIInt          size;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  if (hmat->cooMatAttached) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(hmat->cooMat->preallocated, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix is not preallocated");
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* Alias cooMat's data array to IJMatrix's */
  PetscCallExternal(HYPRE_IJMatrixGetObject, hmat->ij, (void **)&parCSR);
  diag = hypre_ParCSRMatrixDiag(parCSR);
  offd = hypre_ParCSRMatrixOffd(parCSR);

  A = (size == 1) ? hmat->cooMat : ((Mat_MPIAIJ *)hmat->cooMat->data)->A;
  B = (size == 1) ? NULL : ((Mat_MPIAIJ *)hmat->cooMat->data)->B;

  PetscCall(PetscObjectSetName((PetscObject)A, "_internal_COO_mat_for_hypre"));
  hmem = hypre_CSRMatrixMemoryLocation(diag);
  PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &a, &pmem));
  PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
  PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hmem));
  hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)a;
  hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */

  if (B) {
    hmem = hypre_CSRMatrixMemoryLocation(offd);
    PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &a, &pmem));
    PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hmem));
    hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)a;
    hypre_CSRMatrixOwnsData(offd) = 0;
  }
  hmat->cooMatAttached = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build COO's coordinate list i[], j[] based on CSR's i[], j[] arrays and the number of local rows 'n'
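// For example, ii = {0, 2, 3} with jj = {4, 7, 5} expands to coo_i = {0, 0, 1} and coo_j = {4, 7, 5}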
static PetscErrorCode CSRtoCOO_Private(PetscInt n, const PetscInt ii[], const PetscInt jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt *cooi, *cooj;

  PetscFunctionBegin;
  *ncoo = ii[n];
  PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
  for (PetscInt i = 0; i < n; i++) {
    for (PetscInt j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
  }
  PetscCall(PetscArraycpy(cooj, jj, *ncoo));
  *coo_i = cooi;
  *coo_j = cooj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Similar to CSRtoCOO_Private, but the CSR's i[], j[] are of type HYPRE_Int
static PetscErrorCode CSRtoCOO_HYPRE_Int_Private(PetscInt n, const HYPRE_Int ii[], const HYPRE_Int jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt *cooi, *cooj;

  PetscFunctionBegin;
  *ncoo = ii[n];
  PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
  for (PetscInt i = 0; i < n; i++) {
    for (HYPRE_Int j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
  }
  for (PetscCount i = 0; i < *ncoo; i++) cooj[i] = jj[i];
  *coo_i = cooi;
  *coo_j = cooj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build a COO data structure for the seqaij matrix, as if the nonzeros are laid out in the same order as in the CSR
static PetscErrorCode MatSeqAIJGetCOO_Private(Mat A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt        n;
  const PetscInt *ii, *jj;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatGetRowIJ");
  PetscCall(CSRtoCOO_Private(n, ii, jj, ncoo, coo_i, coo_j));
  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatRestoreRowIJ");
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build a COO data structure for the hypreCSRMatrix, as if the nonzeros are laid out in the same order as in the hypreCSRMatrix
static PetscErrorCode hypreCSRMatrixGetCOO_Private(hypre_CSRMatrix *A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt             n = hypre_CSRMatrixNumRows(A);
  HYPRE_Int           *ii, *jj;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(A);
  if (mem != HYPRE_MEMORY_HOST) {
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(A);
    PetscCall(PetscMalloc2(n + 1, &ii, nnz, &jj));
    hypre_TMemcpy(ii, hypre_CSRMatrixI(A), HYPRE_Int, n + 1, HYPRE_MEMORY_HOST, mem);
    hypre_TMemcpy(jj, hypre_CSRMatrixJ(A), HYPRE_Int, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
#else
  {
#endif
    ii = hypre_CSRMatrixI(A);
    jj = hypre_CSRMatrixJ(A);
  }
  PetscCall(CSRtoCOO_HYPRE_Int_Private(n, ii, jj, ncoo, coo_i, coo_j));
  if (mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree2(ii, jj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetValuesCOOFromCSRMatrix_Private(Mat A, hypre_CSRMatrix *H)
{
  PetscBool            iscpu = PETSC_TRUE;
  PetscScalar         *a;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(H);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &iscpu));
#endif
  if (iscpu && mem != HYPRE_MEMORY_HOST) {
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(H);
    PetscCall(PetscMalloc1(nnz, &a));
    hypre_TMemcpy(a, hypre_CSRMatrixData(H), PetscScalar, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
    a = (PetscScalar *)hypre_CSRMatrixData(H);
  }
  PetscCall(MatSetValuesCOO(A, a, INSERT_VALUES));
  if (iscpu && mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree(a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  MPI_Comm   comm = PetscObjectComm((PetscObject)A);
  Mat        M = NULL, dH = NULL, oH = NULL, dA = NULL, oA = NULL;
  PetscBool  ismpiaij, issbaij, isbaij;
  Mat_HYPRE *hA;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
  if (isbaij || issbaij) { /* handle BAIJ and SBAIJ */
    PetscBool ismpi;
    MatType   newtype;

    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
    newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
    if (reuse == MAT_REUSE_MATRIX) {
      PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
      PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else {
      PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  {
    PetscBool isaij;
    // Hypre defaults to the GPU when configured with GPU support. We make it default to the memory location associated with the PETSc matrix,
    // i.e., when A is a host matrix, Hypre will be on the host; otherwise, when A is of type aijcusparse, aijhipsparse, aijkokkos, etc.,
    // Hypre will be on the device.
    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isaij, MATSEQAIJ, MATMPIAIJ, ""));
    PetscHYPREInitialize();
    PetscCallExternal(HYPRE_SetMemoryLocation, isaij ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
  }
#endif

  dA = A;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));
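  /* For MPIAIJ input, the diagonal (dA) and off-diagonal (oA) blocks are handled
     separately below, mirroring hypre's ParCSR diag/offd storage */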

  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, MATHYPRE));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));

    hA = (Mat_HYPRE *)M->data;
    PetscCall(MatHYPRE_CreateFromMat(A, hA));
    PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij));

    PetscCall(MatHYPRE_CreateCOOMat(M));

    dH = hA->cooMat;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

    PetscCall(PetscObjectSetName((PetscObject)dH, "_internal_COO_mat_for_hypre"));
    PetscCall(MatSeqAIJGetCOO_Private(dA, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dH, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (oH) {
      PetscCall(PetscLayoutDestroy(&oH->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oH), oA->cmap->n, oA->cmap->n, 1, &oH->cmap));
      PetscCall(MatSeqAIJGetCOO_Private(oA, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oH, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));
    }
    hA->cooMat->assembled = PETSC_TRUE;

    M->preallocated = PETSC_TRUE;
    PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));

    PetscCall(MatHYPRE_AttachCOOMat(M));
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  hA = (Mat_HYPRE *)M->data;
  PetscCheck(hA->cooMat, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");

  dH = hA->cooMat;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

  PetscScalar *a;
  PetscCall(MatSeqAIJGetCSRAndMemType(dA, NULL, NULL, &a, NULL));
  PetscCall(MatSetValuesCOO(dH, a, INSERT_VALUES));
  if (oH) {
    PetscCall(MatSeqAIJGetCSRAndMemType(oA, NULL, NULL, &a, NULL));
    PetscCall(MatSetValuesCOO(oH, a, INSERT_VALUES));
  }

  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat                 M, dA = NULL, oA = NULL;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dH, *oH;
  MPI_Comm            comm;
  PetscBool           ismpiaij, isseqaij;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ base types are supported");
  }
  PetscCall(MatHYPREGetParCSR(A, &parcsr));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_ParCSRMatrixMemoryLocation(parcsr)) {
    PetscBool isaij;

    PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
    if (isaij) {
      PetscMPIInt size;

      PetscCallMPI(MPI_Comm_size(comm, &size));
  #if defined(HYPRE_USING_HIP)
      mtype = size > 1 ? MATMPIAIJHIPSPARSE : MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
      mtype = size > 1 ? MATMPIAIJCUSPARSE : MATSEQAIJCUSPARSE;
  #else
      mtype = size > 1 ? MATMPIAIJ : MATSEQAIJ;
  #endif
    }
  }
#endif
  dH = hypre_ParCSRMatrixDiag(parcsr);
  oH = hypre_ParCSRMatrixOffd(parcsr);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, mtype));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatMPIAIJSetPreallocation(M, 0, NULL, 0, NULL));
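    /* for MPIAIJ this creates the (empty) diag/offd blocks; for other types the call
       should be a harmless no-op since it is dispatched with PetscTryMethod */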

    dA = M;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));

    PetscCall(hypreCSRMatrixGetCOO_Private(dH, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dA, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (ismpiaij) {
      HYPRE_Int nc = hypre_CSRMatrixNumCols(oH);

      PetscCall(PetscLayoutDestroy(&oA->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oA), nc, nc, 1, &oA->cmap));
      PetscCall(hypreCSRMatrixGetCOO_Private(oH, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oA, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));

      /* garray */
      Mat_MPIAIJ   *aij    = (Mat_MPIAIJ *)M->data;
      HYPRE_BigInt *harray = hypre_ParCSRMatrixColMapOffd(parcsr);
      PetscInt     *garray;

      PetscCall(PetscFree(aij->garray));
      PetscCall(PetscMalloc1(nc, &garray));
      for (HYPRE_Int i = 0; i < nc; i++) garray[i] = (PetscInt)harray[i];
      aij->garray = garray;
      PetscCall(MatSetUpMultiply_MPIAIJ(M));
    }
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  dA = M;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
  PetscCall(MatSetValuesCOOFromCSRMatrix_Private(dA, dH));
  if (oA) PetscCall(MatSetValuesCOOFromCSRMatrix_Private(oA, oH));
  M->assembled = PETSC_TRUE;
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag, *hoffd;
  Mat_SeqAIJ         *diag, *offd;
  PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ismpiaij, isseqaij;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
  PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
  PetscBool           iscuda, iship;
#if defined(PETSC_HAVE_DEVICE) && defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool boundtocpu = A->boundtocpu;
#else
  PetscBool boundtocpu = PETSC_TRUE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
  PetscHYPREInitialize();
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;

    diag = (Mat_SeqAIJ *)a->A->data;
    offd = (Mat_SeqAIJ *)a->B->data;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      poi        = offd->i;
      poj        = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
        hoi = (HYPRE_Int *)poi;
        hoj = (HYPRE_Int *)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag = (Mat_SeqAIJ *)A->data;
    offd = NULL;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    PetscCallMPI(MPI_Comm_rank(comm, &myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
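  /* with hypre's assumed partition, only this rank's [start, end) pair is needed,
     hence the offset by myid into the full range arrays */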
  tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, noffd, dnnz, onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
  hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
    for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)pdi[i];
    for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)pdj[i];
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag, 0);

  /* set off-diagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
      for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)poi[i];
      for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)poj[i];
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd, 0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, !boundtocpu ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
#else
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  PetscCallExternal(hypre_ParCSRMatrixInitialize, tA);
  #else
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, HYPRE_MEMORY_HOST);
  #endif
#endif
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallExternal(hypre_MatvecCommPkgCreate, tA);
  *hA = tA;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  if (iscuda) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
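/* Effectively computes hRAP = hR^T hA hP; callers pass hR = hP to obtain the Galerkin product P^T A P */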
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallExternal(hypre_BoomerAMGBuildCoarseOperator, hR, hA, hP, hRAP);
  PetscCallExternal(hypre_ParCSRMatrixSetNumNonzeros, *hRAP);
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(P, &hP));
  PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
  PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));

  PetscCall(MatHeaderMerge(C, &B));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
  HYPRE_Int           type;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);

  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));

  /* create temporary matrix and merge to C */
  PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
  Mat_HYPRE          *hA, *hP;
  PetscBool           ishypre;
  HYPRE_Int           type;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
  PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls hypre_ParMatmul
   hypre_ParMatmul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
  PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));

  PetscCall(MatHeaderMerge(C, &D));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
  Mat_HYPRE          *hA, *hB;
  PetscBool           ishypre;
  HYPRE_Int           type;
  Mat_Product        *product;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hB = (Mat_HYPRE *)B->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hB->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hB->ij, (void **)&Bparcsr);
  PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
  PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  PetscCall(MatHeaderReplace(C, &D));
  C->product             = product;
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
{
  Mat                 E;
  hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatAIJGetParCSR_Private(C, &hC));
  PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
  PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
  PetscCall(MatHeaderMerge(D, &E));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    flg;
  PetscInt     type        = 0;
  const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
  PetscInt     ntype       = 4;
  Mat          A           = product->A;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  } else {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  }
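  /* e.g. run with -matptap_hypre_outtype hypre (or -mat_product_algorithm_hypre_outtype hypre
     with the MatProduct API) to keep the product in MATHYPRE format */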
1272 
1273   if (type == 0 || type == 1 || type == 2) {
1274     PetscCall(MatSetType(C, MATAIJ));
1275   } else if (type == 3) {
1276     PetscCall(MatSetType(C, MATHYPRE));
1277   } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
1278   C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
1279   C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
1280   PetscFunctionReturn(PETSC_SUCCESS);
1281 }
1282 
1283 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1284 {
1285   Mat_Product *product = C->product;
1286 
1287   PetscFunctionBegin;
1288   switch (product->type) {
1289   case MATPRODUCT_AB:
1290     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1291     break;
1292   case MATPRODUCT_PtAP:
1293     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1294     break;
1295   default:
1296     break;
1297   }
1298   PetscFunctionReturn(PETSC_SUCCESS);
1299 }
1300 
1301 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1302 {
1303   PetscFunctionBegin;
1304   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
1305   PetscFunctionReturn(PETSC_SUCCESS);
1306 }
1307 
1308 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1309 {
1310   PetscFunctionBegin;
1311   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
1312   PetscFunctionReturn(PETSC_SUCCESS);
1313 }
1314 
1315 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1316 {
1317   PetscFunctionBegin;
1318   if (y != z) PetscCall(VecCopy(y, z));
1319   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
1320   PetscFunctionReturn(PETSC_SUCCESS);
1321 }
1322 
1323 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1324 {
1325   PetscFunctionBegin;
1326   if (y != z) PetscCall(VecCopy(y, z));
1327   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
1328   PetscFunctionReturn(PETSC_SUCCESS);
1329 }
1330 
1331 /* y = a * A * x + b * y, or y = a * A^T * x + b * y when trans is PETSC_TRUE */
1332 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
1333 {
1334   Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
1335   hypre_ParCSRMatrix *parcsr;
1336   hypre_ParVector    *hx, *hy;
1337 
1338   PetscFunctionBegin;
1339   if (trans) {
1340     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
1341     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
1342     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
1343     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hx);
1344     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hy);
1345   } else {
1346     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
1347     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
1348     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
1349     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hx);
1350     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hy);
1351   }
1352   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1353   if (trans) {
1354     PetscCallExternal(hypre_ParCSRMatrixMatvecT, a, parcsr, hx, b, hy);
1355   } else {
1356     PetscCallExternal(hypre_ParCSRMatrixMatvec, a, parcsr, hx, b, hy);
1357   }
1358   PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
1359   PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
1360   PetscFunctionReturn(PETSC_SUCCESS);
1361 }
1362 
1363 static PetscErrorCode MatDestroy_HYPRE(Mat A)
1364 {
1365   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1366 
1367   PetscFunctionBegin;
1368   PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
1369   PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
1370   PetscCall(MatHYPRE_DestroyCOOMat(A)); /* must be called before destroying the individual CSR matrices */
1371   if (hA->ij) {
1372     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
1373     PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
1374   }
1375   if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));
1376 
1377   PetscCall(MatStashDestroy_Private(&A->stash));
1378   PetscCall(PetscFree(hA->array));
1379   if (hA->rows_d) PetscStackCallExternalVoid("hypre_Free", hypre_Free(hA->rows_d, HYPRE_MEMORY_DEVICE));
1380 
1381   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
1382   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
1383   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
1384   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
1385   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", NULL));
1386   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", NULL));
1387   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_hypre_C", NULL));
1388   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", NULL));
1389   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
1390   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
1391   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
1392   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
1393   PetscCall(PetscFree(A->data));
1394   PetscFunctionReturn(PETSC_SUCCESS);
1395 }
1396 
1397 static PetscErrorCode MatSetUp_HYPRE(Mat A)
1398 {
1399   PetscFunctionBegin;
1400   if (!A->preallocated) PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
1401   PetscFunctionReturn(PETSC_SUCCESS);
1402 }
1403 
1404 //TODO FIX hypre_CSRMatrixMatvecOutOfPlace
1405 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1406 static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
1407 {
1408   Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
1409   HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;
1410 
1411   PetscFunctionBegin;
1412   A->boundtocpu = bind;
1413   if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
1414     hypre_ParCSRMatrix *parcsr;
1415     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1416     PetscCallExternal(hypre_ParCSRMatrixMigrate, parcsr, hmem);
1417   }
1418   if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
1419   if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
1420   PetscFunctionReturn(PETSC_SUCCESS);
1421 }
1422 #endif
1423 
1424 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1425 {
1426   Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
1427   PetscMPIInt  n;
1428   PetscInt     i, j, rstart, ncols, flg;
1429   PetscInt    *row, *col;
1430   PetscScalar *val;
1431 
1432   PetscFunctionBegin;
1433   PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
1434 
1435   if (!A->nooffprocentries) {
1436     while (1) {
1437       PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
1438       if (!flg) break;
1439 
1440       for (i = 0; i < n;) {
1441         /* Now identify the consecutive vals belonging to the same row */
1442         for (j = i, rstart = row[j]; j < n; j++) {
1443           if (row[j] != rstart) break;
1444         }
1445         if (j < n) ncols = j - i;
1446         else ncols = n - i;
1447         /* Now assemble all these values with a single function call */
1448         PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));
1449 
1450         i = j;
1451       }
1452     }
1453     PetscCall(MatStashScatterEnd_Private(&A->stash));
1454   }
1455 
1456   PetscCallExternal(HYPRE_IJMatrixAssemble, hA->ij);
1457   /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
1458   /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
1459   if (!A->sortedfull) {
1460     hypre_AuxParCSRMatrix *aux_matrix;
1461 
1462     /* call destroy just to make sure we do not leak anything */
1463     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1464     PetscCallExternal(hypre_AuxParCSRMatrixDestroy, aux_matrix);
1465     hypre_IJMatrixTranslator(hA->ij) = NULL;
1466 
1467     /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
1468     PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1469     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1470     if (aux_matrix) {
1471       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
1472 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1473       PetscCallExternal(hypre_AuxParCSRMatrixInitialize, aux_matrix);
1474 #else
1475       PetscCallExternal(hypre_AuxParCSRMatrixInitialize_v2, aux_matrix, HYPRE_MEMORY_HOST);
1476 #endif
1477     }
1478   }
1479   {
1480     hypre_ParCSRMatrix *parcsr;
1481 
1482     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1483     if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallExternal(hypre_MatvecCommPkgCreate, parcsr);
1484   }
1485   if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
1486   if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
1487 #if defined(PETSC_HAVE_HYPRE_DEVICE)
1488   PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
1489 #endif
1490   PetscFunctionReturn(PETSC_SUCCESS);
1491 }
1492 
1493 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1494 {
1495   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1496 
1497   PetscFunctionBegin;
1498   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");
1499 
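  /* grow-only scratch buffer: reuse the cached allocation when it is large enough */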
1500   if (hA->array_size >= size) {
1501     *array = hA->array;
1502   } else {
1503     PetscCall(PetscFree(hA->array));
1504     hA->array_size = size;
1505     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1506     *array = hA->array;
1507   }
1508 
1509   hA->array_available = PETSC_FALSE;
1510   PetscFunctionReturn(PETSC_SUCCESS);
1511 }
1512 
1513 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1514 {
1515   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1516 
1517   PetscFunctionBegin;
1518   *array              = NULL;
1519   hA->array_available = PETSC_TRUE;
1520   PetscFunctionReturn(PETSC_SUCCESS);
1521 }
1522 
1523 static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1524 {
1525   Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
1526   PetscScalar   *vals = (PetscScalar *)v;
1527   HYPRE_Complex *sscr;
1528   PetscInt      *cscr[2];
1529   PetscInt       i, nzc;
1530   PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
1531   void          *array = NULL;
1532 
1533   PetscFunctionBegin;
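  /* single workspace allocation: nc compacted column indices, nc positions of those
     columns in the input arrays, then nc*nr scalars converted to HYPRE_Complex */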
1534   PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
1535   cscr[0] = (PetscInt *)array;
1536   cscr[1] = ((PetscInt *)array) + nc;
1537   sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
1538   for (i = 0, nzc = 0; i < nc; i++) {
1539     if (cols[i] >= 0) {
1540       cscr[0][nzc]   = cols[i];
1541       cscr[1][nzc++] = i;
1542     }
1543   }
1544   if (!nzc) {
1545     PetscCall(MatRestoreArray_HYPRE(A, &array));
1546     PetscFunctionReturn(PETSC_SUCCESS);
1547   }
1548 
1549 #if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
1550   if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
1551     hypre_ParCSRMatrix *parcsr;
1552 
1553     PetscCallExternal(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
1554     PetscCallExternal(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
1555   }
1556 #endif
1557 
1558   if (ins == ADD_VALUES) {
1559     for (i = 0; i < nr; i++) {
1560       if (rows[i] >= 0) {
1561         PetscInt  j;
1562         HYPRE_Int hnc = (HYPRE_Int)nzc;
1563 
1564         if (!nzc) continue;
1565         /* nonlocal values */
1566         if (rows[i] < rst || rows[i] >= ren) {
1567           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1568           if (hA->donotstash) continue;
1569         }
1570         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1571         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1572         PetscCallExternal(HYPRE_IJMatrixAddToValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1573       }
1574       vals += nc;
1575     }
1576   } else { /* INSERT_VALUES */
1577     for (i = 0; i < nr; i++) {
1578       if (rows[i] >= 0) {
1579         PetscInt  j;
1580         HYPRE_Int hnc = (HYPRE_Int)nzc;
1581 
1582         if (!nzc) continue;
1583         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1584         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1585         /* nonlocal values */
1586         if (rows[i] < rst || rows[i] >= ren) {
1587           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1588           if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
1589         }
1590         /* local values */
1591         else
1592           PetscCallExternal(HYPRE_IJMatrixSetValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1593       }
1594       vals += nc;
1595     }
1596   }
1597 
1598   PetscCall(MatRestoreArray_HYPRE(A, &array));
1599   PetscFunctionReturn(PETSC_SUCCESS);
1600 }
1601 
1602 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1603 {
1604   Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
1605   HYPRE_Int  *hdnnz, *honnz;
1606   PetscInt    i, rs, re, cs, ce, bs;
1607   PetscMPIInt size;
1608 
1609   PetscFunctionBegin;
1610   PetscCall(PetscLayoutSetUp(A->rmap));
1611   PetscCall(PetscLayoutSetUp(A->cmap));
1612   rs = A->rmap->rstart;
1613   re = A->rmap->rend;
1614   cs = A->cmap->rstart;
1615   ce = A->cmap->rend;
1616   if (!hA->ij) {
1617     PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rs, re - 1, cs, ce - 1, &hA->ij);
1618     PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1619   } else {
1620     HYPRE_BigInt hrs, hre, hcs, hce;
1621     PetscCallExternal(HYPRE_IJMatrixGetLocalRange, hA->ij, &hrs, &hre, &hcs, &hce);
1622     PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
1623     PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
1624   }
1625   PetscCall(MatHYPRE_DestroyCOOMat(A));
1626   PetscCall(MatGetBlockSize(A, &bs));
1627   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
1628   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;
1629 
1630   if (!dnnz) {
1631     PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
1632     for (i = 0; i < A->rmap->n; i++) hdnnz[i] = dnz;
1633   } else {
1634     hdnnz = (HYPRE_Int *)dnnz;
1635   }
1636   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1637   if (size > 1) {
1638     hypre_AuxParCSRMatrix *aux_matrix;
1639     if (!onnz) {
1640       PetscCall(PetscMalloc1(A->rmap->n, &honnz));
1641       for (i = 0; i < A->rmap->n; i++) honnz[i] = onz;
1642     } else honnz = (HYPRE_Int *)onnz;
1643     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1644        hypre assumes the user will input entire rows of values, properly sorted.
1645        In PETSc, we don't make such an assumption and set this flag to 1,
1646        unless the option MAT_SORTED_FULL is set to true.
1647        Also, to avoid possible memory leaks, we destroy and recreate the translator.
1648        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1649        the IJ matrix for us */
1650     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1651     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1652     hypre_IJMatrixTranslator(hA->ij) = NULL;
1653     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, hA->ij, hdnnz, honnz);
1654     aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1655     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
1656   } else {
1657     honnz = NULL;
1658     PetscCallExternal(HYPRE_IJMatrixSetRowSizes, hA->ij, hdnnz);
1659   }
1660 
1661   /* reset assembled flag and call the initialize method */
1662   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1663 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1664   PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1665 #else
1666   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, hA->ij, HYPRE_MEMORY_HOST);
1667 #endif
1668   if (!dnnz) PetscCall(PetscFree(hdnnz));
1669   if (!onnz && honnz) PetscCall(PetscFree(honnz));
1670   /* Match AIJ logic */
1671   A->preallocated = PETSC_TRUE;
1672   A->assembled    = PETSC_FALSE;
1673   PetscFunctionReturn(PETSC_SUCCESS);
1674 }
1675 
1676 /*@C
1677   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1678 
1679   Collective
1680 
1681   Input Parameters:
1682 + A    - the matrix
1683 . dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1684           (same value is used for all local rows)
1685 . dnnz - array containing the number of nonzeros in the various rows of the
1686           DIAGONAL portion of the local submatrix (possibly different for each row)
1687           or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
1688           The size of this array is equal to the number of local rows, i.e. `m`.
1689           For matrices that will be factored, you must leave room for (and set)
1690           the diagonal entry even if it is zero.
1691 . onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1692           submatrix (same value is used for all local rows).
1693 - onnz - array containing the number of nonzeros in the various rows of the
1694           OFF-DIAGONAL portion of the local submatrix (possibly different for
1695           each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
1696           structure. The size of this array is equal to the number
1697           of local rows, i.e. `m`.
1698 
1699   Level: intermediate
1700 
1701   Note:
1702   If `dnnz` (respectively `onnz`) is given, then `dnz` (respectively `onz`) is ignored; for sequential matrices, `onz` and `onnz` are ignored.
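
  Example Usage:
  A minimal sketch of typical use; `comm` and the sizes `m`, `n`, `M`, `N` are assumed to be
  defined by the caller, and the preallocation values are illustrative only:
.vb
  Mat A;

  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, m, n, M, N));
  PetscCall(MatSetType(A, MATHYPRE));
  PetscCall(MatHYPRESetPreallocation(A, 5, NULL, 2, NULL)); /* 5 nonzeros per row in the diagonal block, 2 in the off-diagonal block */
.ve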
1703 
1704 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
1705 @*/
1706 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1707 {
1708   PetscFunctionBegin;
1709   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1710   PetscValidType(A, 1);
1711   PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
1712   PetscFunctionReturn(PETSC_SUCCESS);
1713 }
1714 
1715 /*@C
1716   MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`
1717 
1718   Collective
1719 
1720   Input Parameters:
1721 + parcsr   - the pointer to the `hypre_ParCSRMatrix`
1722 . mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS`, and `MATHYPRE` are supported.
1723 - copymode - PETSc copying options, see `PetscCopyMode`
1724 
1725   Output Parameter:
1726 . A - the matrix
1727 
1728   Level: intermediate
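
  Example Usage:
  A minimal sketch, assuming `parcsr` is an already assembled `hypre_ParCSRMatrix`:
.vb
  Mat A;

  /* wrap the hypre matrix as a MATHYPRE Mat without copying its data */
  PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, PETSC_USE_POINTER, &A));
  /* ... use A ...; the caller remains responsible for destroying parcsr */
  PetscCall(MatDestroy(&A));
.ve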
1729 
1730 .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1731 @*/
1732 PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
1733 {
1734   Mat        T;
1735   Mat_HYPRE *hA;
1736   MPI_Comm   comm;
1737   PetscInt   rstart, rend, cstart, cend, M, N;
1738   PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;
1739 
1740   PetscFunctionBegin;
1741   comm = hypre_ParCSRMatrixComm(parcsr);
1742   PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
1743   PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
1744   PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
1745   PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
1746   PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
1747   PetscCall(PetscStrcmp(mtype, MATIS, &isis));
1748   isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
1749   /* TODO */
1750   PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
1751   /* access ParCSRMatrix */
1752   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1753   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
1754   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1755   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
1756   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1757   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);
1758 
1759   /* create PETSc matrix with MatHYPRE */
1760   PetscCall(MatCreate(comm, &T));
1761   PetscCall(MatSetSizes(T, PetscMax(rend - rstart + 1, 0), PetscMax(cend - cstart + 1, 0), M, N));
1762   PetscCall(MatSetType(T, MATHYPRE));
1763   hA = (Mat_HYPRE *)T->data;
1764 
1765   /* create HYPRE_IJMatrix */
1766   PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend, cstart, cend, &hA->ij);
1767   PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1768 
1769   /* create new ParCSR object if needed */
1770   if (ishyp && copymode == PETSC_COPY_VALUES) {
1771     hypre_ParCSRMatrix *new_parcsr;
1772 #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
1773     hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;
1774 
1775     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
1776     hdiag      = hypre_ParCSRMatrixDiag(parcsr);
1777     hoffd      = hypre_ParCSRMatrixOffd(parcsr);
1778     ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
1779     noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
1780     PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
1781     PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
1782 #else
1783     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
1784 #endif
1785     parcsr   = new_parcsr;
1786     copymode = PETSC_OWN_POINTER;
1787   }
1788 
1789   /* set ParCSR object */
1790   hypre_IJMatrixObject(hA->ij) = parcsr;
1791   T->preallocated              = PETSC_TRUE;
1792 
1793   /* set assembled flag */
1794   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1795 #if 0
1796   PetscCallExternal(HYPRE_IJMatrixInitialize,hA->ij);
1797 #endif
1798   if (ishyp) {
1799     PetscMPIInt myid = 0;
1800 
1801     /* make sure we always have row_starts and col_starts available */
1802     if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
1803 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
1804     if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1805       PetscLayout map;
1806 
1807       PetscCall(MatGetLayouts(T, NULL, &map));
1808       PetscCall(PetscLayoutSetUp(map));
1809       hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1810     }
1811     if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1812       PetscLayout map;
1813 
1814       PetscCall(MatGetLayouts(T, &map, NULL));
1815       PetscCall(PetscLayoutSetUp(map));
1816       hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1817     }
1818 #endif
1819     /* prevent the ParCSR from being freed when the matrix is destroyed */
1820     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1821     *A = T;
1822     PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
1823     PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
1824     PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
1825   } else if (isaij) {
1826     if (copymode != PETSC_OWN_POINTER) {
1827       /* prevent the ParCSR from being freed when T is destroyed */
1828       hA->inner_free = PETSC_FALSE;
1829       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
1830       PetscCall(MatDestroy(&T));
1831     } else { /* AIJ return type with PETSC_OWN_POINTER */
1832       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
1833       *A = T;
1834     }
1835   } else if (isis) {
1836     PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
1837     if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1838     PetscCall(MatDestroy(&T));
1839   }
1840   PetscFunctionReturn(PETSC_SUCCESS);
1841 }
1842 
1843 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1844 {
1845   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1846   HYPRE_Int  type;
1847 
1848   PetscFunctionBegin;
1849   PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
1850   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1851   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1852   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)parcsr);
1853   PetscFunctionReturn(PETSC_SUCCESS);
1854 }
1855 
1856 /*@C
1857   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1858 
1859   Not Collective, No Fortran Support
1860 
1861   Input Parameter:
1862 . A - the `MATHYPRE` object
1863 
1864   Output Parameter:
1865 . parcsr - the pointer to the `hypre_ParCSRMatrix`
1866 
1867   Level: intermediate
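
  Example Usage:
  A minimal sketch; the returned pointer references the internal hypre matrix of `A`
  and must not be destroyed by the caller:
.vb
  hypre_ParCSRMatrix *parcsr;

  PetscCall(MatHYPREGetParCSR(A, &parcsr));
  /* parcsr can now be passed directly to hypre APIs */
.ve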
1868 
1869 .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1870 @*/
1871 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1872 {
1873   PetscFunctionBegin;
1874   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1875   PetscValidType(A, 1);
1876   PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
1877   PetscFunctionReturn(PETSC_SUCCESS);
1878 }
1879 
1880 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1881 {
1882   hypre_ParCSRMatrix *parcsr;
1883   hypre_CSRMatrix    *ha;
1884   PetscInt            rst;
1885 
1886   PetscFunctionBegin;
1887   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented with non-square diagonal blocks");
1888   PetscCall(MatGetOwnershipRange(A, &rst, NULL));
1889   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1890   if (missing) *missing = PETSC_FALSE;
1891   if (dd) *dd = -1;
1892   ha = hypre_ParCSRMatrixDiag(parcsr);
1893   if (ha) {
1894     PetscInt   size, i;
1895     HYPRE_Int *ii, *jj;
1896 
1897     size = hypre_CSRMatrixNumRows(ha);
1898     ii   = hypre_CSRMatrixI(ha);
1899     jj   = hypre_CSRMatrixJ(ha);
1900     for (i = 0; i < size; i++) {
1901       PetscInt  j;
1902       PetscBool found = PETSC_FALSE;
1903 
1904       for (j = ii[i]; j < ii[i + 1] && !found; j++) found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1905 
1906       if (!found) {
1907         PetscCall(PetscInfo(A, "Matrix is missing local diagonal entry %" PetscInt_FMT "\n", i));
1908         if (missing) *missing = PETSC_TRUE;
1909         if (dd) *dd = i + rst;
1910         PetscFunctionReturn(PETSC_SUCCESS);
1911       }
1912     }
1913     if (!size) {
1914       PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1915       if (missing) *missing = PETSC_TRUE;
1916       if (dd) *dd = rst;
1917     }
1918   } else {
1919     PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
1920     if (missing) *missing = PETSC_TRUE;
1921     if (dd) *dd = rst;
1922   }
1923   PetscFunctionReturn(PETSC_SUCCESS);
1924 }
1925 
1926 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1927 {
1928   hypre_ParCSRMatrix *parcsr;
1929 #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1930   hypre_CSRMatrix *ha;
1931 #endif
1932   HYPRE_Complex hs;
1933 
1934   PetscFunctionBegin;
1935   PetscCall(PetscHYPREScalarCast(s, &hs));
1936   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1937 #if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
1938   PetscCallExternal(hypre_ParCSRMatrixScale, parcsr, hs);
1939 #else /* diagonal part */
1940   ha = hypre_ParCSRMatrixDiag(parcsr);
1941   if (ha) {
1942     PetscInt       size, i;
1943     HYPRE_Int     *ii;
1944     HYPRE_Complex *a;
1945 
1946     size = hypre_CSRMatrixNumRows(ha);
1947     a    = hypre_CSRMatrixData(ha);
1948     ii   = hypre_CSRMatrixI(ha);
1949     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1950   }
1951   /* off-diagonal part */
1952   ha = hypre_ParCSRMatrixOffd(parcsr);
1953   if (ha) {
1954     PetscInt       size, i;
1955     HYPRE_Int     *ii;
1956     HYPRE_Complex *a;
1957 
1958     size = hypre_CSRMatrixNumRows(ha);
1959     a    = hypre_CSRMatrixData(ha);
1960     ii   = hypre_CSRMatrixI(ha);
1961     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1962   }
1963 #endif
1964   PetscFunctionReturn(PETSC_SUCCESS);
1965 }
1966 
1967 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1968 {
1969   hypre_ParCSRMatrix *parcsr;
1970   HYPRE_Int          *lrows;
1971   PetscInt            rst, ren, i;
1972 
1973   PetscFunctionBegin;
1974   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1975   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1976   PetscCall(PetscMalloc1(numRows, &lrows));
1977   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1978   for (i = 0; i < numRows; i++) {
1979     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1980     lrows[i] = rows[i] - rst;
1981   }
1982   PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows);
1983   PetscCall(PetscFree(lrows));
1984   PetscFunctionReturn(PETSC_SUCCESS);
1985 }
1986 
1987 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1988 {
1989   PetscFunctionBegin;
1990   if (ha) {
1991     HYPRE_Int     *ii, size;
1992     HYPRE_Complex *a;
1993 
1994     size = hypre_CSRMatrixNumRows(ha);
1995     a    = hypre_CSRMatrixData(ha);
1996     ii   = hypre_CSRMatrixI(ha);
1997 
1998     if (a) PetscCall(PetscArrayzero(a, ii[size]));
1999   }
2000   PetscFunctionReturn(PETSC_SUCCESS);
2001 }
2002 
2003 static PetscErrorCode MatZeroEntries_HYPRE(Mat A)
2004 {
2005   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2006 
2007   PetscFunctionBegin;
2008   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
2009     PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0);
2010   } else {
2011     hypre_ParCSRMatrix *parcsr;
2012 
2013     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2014     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
2015     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
2016   }
2017   PetscFunctionReturn(PETSC_SUCCESS);
2018 }
2019 
2020 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
2021 {
2022   PetscInt       ii;
2023   HYPRE_Int     *i, *j;
2024   HYPRE_Complex *a;
2025 
2026   PetscFunctionBegin;
2027   if (!hA) PetscFunctionReturn(PETSC_SUCCESS);
2028 
2029   i = hypre_CSRMatrixI(hA);
2030   j = hypre_CSRMatrixJ(hA);
2031   a = hypre_CSRMatrixData(hA);
2032 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2033   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hA)) {
2034   #if defined(HYPRE_USING_CUDA)
2035     MatZeroRows_CUDA(N, rows, i, j, a, diag);
2036   #elif defined(HYPRE_USING_HIP)
2037     MatZeroRows_HIP(N, rows, i, j, a, diag);
2038   #elif defined(PETSC_HAVE_KOKKOS)
2039     MatZeroRows_Kokkos(N, rows, i, j, a, diag);
2040   #else
2041     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for MatZeroRows on a hypre matrix in this memory location");
2042   #endif
2043   } else
2044 #endif
2045   {
2046     for (ii = 0; ii < N; ii++) {
2047       HYPRE_Int jj, ibeg, iend, irow;
2048 
2049       irow = rows[ii];
2050       ibeg = i[irow];
2051       iend = i[irow + 1];
2052       for (jj = ibeg; jj < iend; jj++)
2053         if (j[jj] == irow) a[jj] = diag;
2054         else a[jj] = 0.0;
2055     }
2056   }
2057   PetscFunctionReturn(PETSC_SUCCESS);
2058 }
2059 
2060 static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2061 {
2062   hypre_ParCSRMatrix *parcsr;
2063   PetscInt           *lrows, len, *lrows2;
2064   HYPRE_Complex       hdiag;
2065 
2066   PetscFunctionBegin;
2067   PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Modifying the solution and the right-hand side is not supported");
2068   PetscCall(PetscHYPREScalarCast(diag, &hdiag));
2069   /* retrieve the internal matrix */
2070   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2071   /* get locally owned rows */
2072   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));
2073 
2074 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2075   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hypre_ParCSRMatrixDiag(parcsr))) {
2076     Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2077     PetscInt   m;
2078     PetscCall(MatGetLocalSize(A, &m, NULL));
2079     if (!hA->rows_d) {
2080       hA->rows_d = hypre_TAlloc(PetscInt, m, HYPRE_MEMORY_DEVICE);
2081       if (m) PetscCheck(hA->rows_d, PETSC_COMM_SELF, PETSC_ERR_MEM, "HYPRE_TAlloc failed");
2082     }
2083     PetscCheck(len <= m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many rows in rows[]");
2084     PetscStackCallExternalVoid("hypre_Memcpy", hypre_Memcpy(hA->rows_d, lrows, sizeof(PetscInt) * len, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST));
2085     lrows2 = hA->rows_d;
2086   } else
2087 #endif
2088   {
2089     lrows2 = lrows;
2090   }
2091 
2092   /* zero diagonal part */
2093   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows2, hdiag));
2094   /* zero off-diagonal part */
2095   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows2, 0.0));
2096 
2097   PetscCall(PetscFree(lrows));
2098   PetscFunctionReturn(PETSC_SUCCESS);
2099 }
2100 
2101 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
2102 {
2103   PetscFunctionBegin;
2104   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
2105 
2106   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2107   PetscFunctionReturn(PETSC_SUCCESS);
2108 }
2109 
2110 static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2111 {
2112   hypre_ParCSRMatrix *parcsr;
2113   HYPRE_Int           hnz;
2114 
2115   PetscFunctionBegin;
2116   /* retrieve the internal matrix */
2117   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2118   /* call HYPRE API */
2119   PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2120   if (nz) *nz = (PetscInt)hnz;
2121   PetscFunctionReturn(PETSC_SUCCESS);
2122 }
2123 
2124 static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2125 {
2126   hypre_ParCSRMatrix *parcsr;
2127   HYPRE_Int           hnz;
2128 
2129   PetscFunctionBegin;
2130   /* retrieve the internal matrix */
2131   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2132   /* call HYPRE API */
2133   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2134   PetscCallExternal(HYPRE_ParCSRMatrixRestoreRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2135   PetscFunctionReturn(PETSC_SUCCESS);
2136 }
2137 
2138 static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2139 {
2140   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2141   PetscInt   i;
2142 
2143   PetscFunctionBegin;
2144   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2145   /* Ignore negative row indices.
2146      Negative column indices should be automatically ignored by hypre.
2147    */
2148   for (i = 0; i < m; i++) {
2149     if (idxm[i] >= 0) {
2150       HYPRE_Int hn = (HYPRE_Int)n;
2151       PetscCallExternal(HYPRE_IJMatrixGetValues, hA->ij, 1, &hn, (HYPRE_BigInt *)&idxm[i], (HYPRE_BigInt *)idxn, (HYPRE_Complex *)(v + i * n));
2152     }
2153   }
2154   PetscFunctionReturn(PETSC_SUCCESS);
2155 }
2156 
2157 static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2158 {
2159   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2160 
2161   PetscFunctionBegin;
2162   switch (op) {
2163   case MAT_NO_OFF_PROC_ENTRIES:
2164     if (flg) PetscCallExternal(HYPRE_IJMatrixSetMaxOffProcElmts, hA->ij, 0);
2165     break;
2166   case MAT_IGNORE_OFF_PROC_ENTRIES:
2167     hA->donotstash = flg;
2168     break;
2169   default:
2170     break;
2171   }
2172   PetscFunctionReturn(PETSC_SUCCESS);
2173 }
2174 
2175 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
2176 {
2177   PetscViewerFormat format;
2178 
2179   PetscFunctionBegin;
2180   PetscCall(PetscViewerGetFormat(view, &format));
2181   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
2182   if (format != PETSC_VIEWER_NATIVE) {
2183     Mat                 B;
2184     hypre_ParCSRMatrix *parcsr;
2185     PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;
2186 
2187     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2188     PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
2189     PetscCall(MatGetOperation(B, MATOP_VIEW, (void (**)(void))&mview));
2190     PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
2191     PetscCall((*mview)(B, view));
2192     PetscCall(MatDestroy(&B));
2193   } else {
2194     Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
2195     PetscMPIInt size;
2196     PetscBool   isascii;
2197     const char *filename;
2198 
2199     /* HYPRE uses only text files */
2200     PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
2201     PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
2202     PetscCall(PetscViewerFileGetName(view, &filename));
2203     PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename);
2204     PetscCallMPI(MPI_Comm_size(hA->comm, &size));
2205     if (size > 1) {
2206       PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
2207     } else {
2208       PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
2209     }
2210   }
2211   PetscFunctionReturn(PETSC_SUCCESS);
2212 }
2213 
2214 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2215 {
2216   hypre_ParCSRMatrix *acsr, *bcsr;
2217 
2218   PetscFunctionBegin;
2219   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2220     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2221     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2222     PetscCallExternal(hypre_ParCSRMatrixCopy, acsr, bcsr, 1);
2223     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2224     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2225     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2226   } else {
2227     PetscCall(MatCopy_Basic(A, B, str));
2228   }
2229   PetscFunctionReturn(PETSC_SUCCESS);
2230 }
2231 
2232 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
2233 {
2234   hypre_ParCSRMatrix *parcsr;
2235   hypre_CSRMatrix    *dmat;
2236   HYPRE_Complex      *a;
2237   PetscBool           cong;
2238 
2239   PetscFunctionBegin;
2240   PetscCall(MatHasCongruentLayouts(A, &cong));
2241   PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
2242   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2243   dmat = hypre_ParCSRMatrixDiag(parcsr);
2244   if (dmat) {
2245 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2246     HYPRE_MemoryLocation mem = hypre_CSRMatrixMemoryLocation(dmat);
2247 #else
2248     HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
2249 #endif
2250 
2251     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecGetArrayWriteAndMemType(d, (PetscScalar **)&a, NULL));
2252     else PetscCall(VecGetArrayWrite(d, (PetscScalar **)&a));
2253     hypre_CSRMatrixExtractDiagonal(dmat, a, 0);
2254     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecRestoreArrayWriteAndMemType(d, (PetscScalar **)&a));
2255     else PetscCall(VecRestoreArrayWrite(d, (PetscScalar **)&a));
2256   }
2257   PetscFunctionReturn(PETSC_SUCCESS);
2258 }
2259 
2260 #include <petscblaslapack.h>
2261 
2262 static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2263 {
2264   PetscFunctionBegin;
2265 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2266   {
2267     Mat                 B;
2268     hypre_ParCSRMatrix *x, *y, *z;
2269 
2270     PetscCall(MatHYPREGetParCSR(Y, &y));
2271     PetscCall(MatHYPREGetParCSR(X, &x));
2272     PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, 1.0, x, &z);
2273     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2274     PetscCall(MatHeaderMerge(Y, &B));
2275   }
2276 #else
2277   if (str == SAME_NONZERO_PATTERN) {
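    /* with identical nonzero patterns, Y += a*X reduces to a BLAS axpy on the raw CSR value arrays */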
2278     hypre_ParCSRMatrix *x, *y;
2279     hypre_CSRMatrix    *xloc, *yloc;
2280     PetscInt            xnnz, ynnz;
2281     HYPRE_Complex      *xarr, *yarr;
2282     PetscBLASInt        one = 1, bnz;
2283 
2284     PetscCall(MatHYPREGetParCSR(Y, &y));
2285     PetscCall(MatHYPREGetParCSR(X, &x));
2286 
2287     /* diagonal block */
2288     xloc = hypre_ParCSRMatrixDiag(x);
2289     yloc = hypre_ParCSRMatrixDiag(y);
2290     xnnz = 0;
2291     ynnz = 0;
2292     xarr = NULL;
2293     yarr = NULL;
2294     if (xloc) {
2295       xarr = hypre_CSRMatrixData(xloc);
2296       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2297     }
2298     if (yloc) {
2299       yarr = hypre_CSRMatrixData(yloc);
2300       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2301     }
2302     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2303     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2304     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2305 
2306     /* off-diagonal block */
2307     xloc = hypre_ParCSRMatrixOffd(x);
2308     yloc = hypre_ParCSRMatrixOffd(y);
2309     xnnz = 0;
2310     ynnz = 0;
2311     xarr = NULL;
2312     yarr = NULL;
2313     if (xloc) {
2314       xarr = hypre_CSRMatrixData(xloc);
2315       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2316     }
2317     if (yloc) {
2318       yarr = hypre_CSRMatrixData(yloc);
2319       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2320     }
2321     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2322     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2323     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2324   } else if (str == SUBSET_NONZERO_PATTERN) {
2325     PetscCall(MatAXPY_Basic(Y, a, X, str));
2326   } else {
2327     Mat B;
2328 
2329     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2330     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2331     PetscCall(MatHeaderReplace(Y, &B));
2332   }
2333 #endif
2334   PetscFunctionReturn(PETSC_SUCCESS);
2335 }
2336 
2337 static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
2338 {
2339   hypre_ParCSRMatrix *parcsr = NULL;
2340   PetscCopyMode       cpmode;
2341   Mat_HYPRE          *hA;
2342 
2343   PetscFunctionBegin;
2344   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2345   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2346     parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
2347     cpmode = PETSC_OWN_POINTER;
2348   } else {
2349     cpmode = PETSC_COPY_VALUES;
2350   }
2351   PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
2352   hA = (Mat_HYPRE *)A->data;
2353   if (hA->cooMat) {
2354     Mat_HYPRE *hB = (Mat_HYPRE *)((*B)->data);
2355     op            = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
2356     /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
2357     PetscCall(MatDuplicate(hA->cooMat, op, &hB->cooMat));
2358     PetscCall(MatHYPRE_AttachCOOMat(*B));
2359   }
2360   PetscFunctionReturn(PETSC_SUCCESS);
2361 }
2362 
2363 static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
2364 {
2365   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2366 
2367   PetscFunctionBegin;
2368   /* Build a delegate matrix cooMat in AIJ format.
2369      It has the same sparsity pattern as mat and shares the data array with mat. We use cooMat to do the COO work.
2370    */
2371   PetscCall(MatHYPRE_CreateCOOMat(mat));
2372   PetscCall(MatSetOption(hmat->cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
2373   PetscCall(MatSetOption(hmat->cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));
2374 
2375   /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ use this specific
2376      name to automatically put the diagonal entries first */
2377   PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
2378   PetscCall(MatSetPreallocationCOO(hmat->cooMat, coo_n, coo_i, coo_j));
2379   hmat->cooMat->assembled = PETSC_TRUE;
2380 
2381   /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
2382   PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
2383   PetscCall(MatHYPRE_CreateFromMat(hmat->cooMat, hmat));      /* Create hmat->ij and preallocate it */
2384   PetscCall(MatHYPRE_IJMatrixCopyIJ(hmat->cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */
2385 
2386   mat->preallocated = PETSC_TRUE;
2387   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
2388   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */
2389 
2390   /* Attach cooMat to mat */
2391   PetscCall(MatHYPRE_AttachCOOMat(mat));
2392   PetscFunctionReturn(PETSC_SUCCESS);
2393 }
2394 
2395 static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
2396 {
2397   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
2398 
2399   PetscFunctionBegin;
2400   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
2401   PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
2402   PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
2403   PetscFunctionReturn(PETSC_SUCCESS);
2404 }
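
/* Example (an illustrative sketch, not part of the library): typical user-level COO
   assembly with a MATHYPRE matrix, where n, coo_i, coo_j, and coo_v are user-provided:

     PetscCall(MatSetType(A, MATHYPRE));
     PetscCall(MatSetPreallocationCOO(A, n, coo_i, coo_j));
     PetscCall(MatSetValuesCOO(A, coo_v, INSERT_VALUES));

   Subsequent calls to MatSetValuesCOO() with new values reuse the same pattern. */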
2405 
2406 /*MC
2407    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2408           based on the hypre IJ interface.
2409 
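   Example Usage:
   A minimal sketch of creating a `MATHYPRE` matrix and assembling it with the usual
   `MatSetValues()` workflow; `comm` and the sizes are assumed to be defined by the caller:
.vb
   Mat A;

   PetscCall(MatCreate(comm, &A));
   PetscCall(MatSetSizes(A, m, n, M, N));
   PetscCall(MatSetType(A, MATHYPRE));
   PetscCall(MatSetUp(A));
   /* ... calls to MatSetValues(A, ...) ... */
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve
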
2410    Level: intermediate
2411 
2412 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation`
2413 M*/
2414 
2415 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
2416 {
2417   Mat_HYPRE *hB;
2418 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2419   HYPRE_MemoryLocation memory_location;
2420 #endif
2421 
2422   PetscFunctionBegin;
2423   PetscHYPREInitialize();
2424   PetscCall(PetscNew(&hB));
2425 
2426   hB->inner_free      = PETSC_TRUE;
2427   hB->array_available = PETSC_TRUE;
2428 
2429   B->data = (void *)hB;
2430 
2431   PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
2432   B->ops->mult                  = MatMult_HYPRE;
2433   B->ops->multtranspose         = MatMultTranspose_HYPRE;
2434   B->ops->multadd               = MatMultAdd_HYPRE;
2435   B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
2436   B->ops->setup                 = MatSetUp_HYPRE;
2437   B->ops->destroy               = MatDestroy_HYPRE;
2438   B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
2439   B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
2440   B->ops->setvalues             = MatSetValues_HYPRE;
2441   B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
2442   B->ops->scale                 = MatScale_HYPRE;
2443   B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
2444   B->ops->zeroentries           = MatZeroEntries_HYPRE;
2445   B->ops->zerorows              = MatZeroRows_HYPRE;
2446   B->ops->getrow                = MatGetRow_HYPRE;
2447   B->ops->restorerow            = MatRestoreRow_HYPRE;
2448   B->ops->getvalues             = MatGetValues_HYPRE;
2449   B->ops->setoption             = MatSetOption_HYPRE;
2450   B->ops->duplicate             = MatDuplicate_HYPRE;
2451   B->ops->copy                  = MatCopy_HYPRE;
2452   B->ops->view                  = MatView_HYPRE;
2453   B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
2454   B->ops->axpy                  = MatAXPY_HYPRE;
2455   B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
2456 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2457   B->ops->bindtocpu = MatBindToCPU_HYPRE;
2458   /* Get hypre's default memory location. Users can control this using the corresponding HYPRE_SetMemoryLocation API */
2459   PetscCallExternal(HYPRE_GetMemoryLocation, &memory_location);
2460   B->boundtocpu = (memory_location == HYPRE_MEMORY_HOST) ? PETSC_TRUE : PETSC_FALSE;
2461 #endif
2462 
2463   /* build a stash to cache off-process entries formed during MatSetValues() */
2464   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));
2465 
2466   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
2467   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
2468   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
2469   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
2470   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
2471   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
2472   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
2473   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
2474   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
2475   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
2476 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2477   #if defined(HYPRE_USING_HIP)
2478   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2479   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2480   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
2481   PetscCall(MatSetVecType(B, VECHIP));
2482   #endif
2483   #if defined(HYPRE_USING_CUDA)
2484   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2485   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2486   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
2487   PetscCall(MatSetVecType(B, VECCUDA));
2488   #endif
2489 #endif
2490   PetscFunctionReturn(PETSC_SUCCESS);
2491 }
2492