xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscpkg_version.h>
7 #include <petsc/private/petschypre.h>
8 #include <petscmathypre.h>
9 #include <petsc/private/matimpl.h>
10 #include <petsc/private/deviceimpl.h>
11 #include <../src/mat/impls/hypre/mhypre.h>
12 #include <../src/mat/impls/aij/mpi/mpiaij.h>
13 #include <../src/vec/vec/impls/hypre/vhyp.h>
14 #include <HYPRE.h>
15 #include <HYPRE_utilities.h>
16 #include <_hypre_parcsr_ls.h>
17 #include <_hypre_sstruct_ls.h>
18 
19 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
20 #define  hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A)
21 #endif
22 
23 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
24 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
25 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
28 static PetscErrorCode hypre_array_destroy(void*);
29 static PetscErrorCode MatSetValues_HYPRE(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
30 
31 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
32 {
33   PetscInt       i,n_d,n_o;
34   const PetscInt *ia_d,*ia_o;
35   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
36   HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;
37 
38   PetscFunctionBegin;
39   if (A_d) { /* determine number of nonzero entries in local diagonal part */
40     PetscCall(MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d));
41     if (done_d) {
42       PetscCall(PetscMalloc1(n_d,&nnz_d));
43       for (i=0; i<n_d; i++) {
44         nnz_d[i] = ia_d[i+1] - ia_d[i];
45       }
46     }
47     PetscCall(MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d));
48   }
49   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
50     PetscCall(MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o));
51     if (done_o) {
52       PetscCall(PetscMalloc1(n_o,&nnz_o));
53       for (i=0; i<n_o; i++) {
54         nnz_o[i] = ia_o[i+1] - ia_o[i];
55       }
56     }
57     PetscCall(MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o));
58   }
59   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
60     if (!done_o) { /* only diagonal part */
61       PetscCall(PetscCalloc1(n_d,&nnz_o));
62     }
63 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
64     { /* If we don't do this, the columns of the matrix will be all zeros! */
65       hypre_AuxParCSRMatrix *aux_matrix;
66       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
67       hypre_AuxParCSRMatrixDestroy(aux_matrix);
68       hypre_IJMatrixTranslator(ij) = NULL;
69       PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,ij,nnz_d,nnz_o);
70       /* it seems they partially fixed it in 2.19.0 */
71 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
72       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
73       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
74 #endif
75     }
76 #else
77     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,ij,nnz_d,nnz_o);
78 #endif
79     PetscCall(PetscFree(nnz_d));
80     PetscCall(PetscFree(nnz_o));
81   }
82   PetscFunctionReturn(0);
83 }
84 
85 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
86 {
87   PetscInt       rstart,rend,cstart,cend;
88 
89   PetscFunctionBegin;
90   PetscCall(PetscLayoutSetUp(A->rmap));
91   PetscCall(PetscLayoutSetUp(A->cmap));
92   rstart = A->rmap->rstart;
93   rend   = A->rmap->rend;
94   cstart = A->cmap->rstart;
95   cend   = A->cmap->rend;
96   PetscStackCallStandard(HYPRE_IJMatrixCreate,hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij);
97   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,hA->ij,HYPRE_PARCSR);
98   {
99     PetscBool      same;
100     Mat            A_d,A_o;
101     const PetscInt *colmap;
102     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same));
103     if (same) {
104       PetscCall(MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap));
105       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij));
106       PetscFunctionReturn(0);
107     }
108     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same));
109     if (same) {
110       PetscCall(MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap));
111       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij));
112       PetscFunctionReturn(0);
113     }
114     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same));
115     if (same) {
116       PetscCall(MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij));
117       PetscFunctionReturn(0);
118     }
119     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same));
120     if (same) {
121       PetscCall(MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij));
122       PetscFunctionReturn(0);
123     }
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
129 {
130   PetscInt          i,rstart,rend,ncols,nr,nc;
131   const PetscScalar *values;
132   const PetscInt    *cols;
133   PetscBool         flg;
134 
135   PetscFunctionBegin;
136 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
137   PetscStackCallStandard(HYPRE_IJMatrixInitialize,ij);
138 #else
139   PetscStackCallStandard(HYPRE_IJMatrixInitialize_v2,ij,HYPRE_MEMORY_HOST);
140 #endif
141   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg));
142   PetscCall(MatGetSize(A,&nr,&nc));
143   if (flg && nr == nc) {
144     PetscCall(MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij));
145     PetscFunctionReturn(0);
146   }
147   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg));
148   if (flg) {
149     PetscCall(MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij));
150     PetscFunctionReturn(0);
151   }
152 
153   PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
154   for (i=rstart; i<rend; i++) {
155     PetscCall(MatGetRow(A,i,&ncols,&cols,&values));
156     if (ncols) {
157       HYPRE_Int nc = (HYPRE_Int)ncols;
158 
159       PetscCheckFalse((PetscInt)nc != ncols,PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT,ncols,i);
160       PetscStackCallStandard(HYPRE_IJMatrixSetValues,ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values);
161     }
162     PetscCall(MatRestoreRow(A,i,&ncols,&cols,&values));
163   }
164   PetscFunctionReturn(0);
165 }
166 
/*
   MatHYPRE_IJMatrixFastCopy_SeqAIJ - copies a SeqAIJ matrix into the diagonal
   CSR block of the hypre ParCSR matrix underlying ij by writing directly into
   hypre's internal arrays (no HYPRE_IJMatrixSetValues calls).

   Requires ij to have object type HYPRE_PARCSR and to be already initialized
   (done by the caller, MatHYPRE_IJMatrixCopy).
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  /* when PetscInt and HYPRE_Int have the same size we can memcpy the index arrays */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  const PetscScalar     *pa;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,ij,&type);
  PetscCheckFalse(type != HYPRE_PARCSR,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,ij,(void**)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    /* row pointers (n+1 entries) and column indices (nz entries) copied verbatim */
    PetscCall(PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1));
    PetscCall(PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz));
  } else {
    PetscInt i;

    /* element-wise conversion between the two integer widths */
    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  PetscCall(MatSeqAIJGetArrayRead(A,&pa));
  PetscCall(PetscArraycpy(hdiag->data,pa,pdiag->nz));
  PetscCall(MatSeqAIJRestoreArrayRead(A,&pa));

  /* tell hypre the matrix is fully set: no auxiliary assembly structure needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
203 
/*
   MatHYPRE_IJMatrixFastCopy_MPIAIJ - copies an MPIAIJ matrix into the hypre
   ParCSR matrix underlying ij by writing directly into hypre's internal diag
   and offd CSR arrays.

   The off-diagonal column indices are translated from PETSc's compressed local
   numbering back to global numbering through garray, as hypre expects.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  /* when PetscInt and HYPRE_Int have the same size we can memcpy the index arrays */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  const PetscScalar     *pa;

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  PetscCall(MatGetOwnershipRange(A,&cstart,NULL));

  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,ij,&type);
  PetscCheckFalse(type != HYPRE_PARCSR,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,ij,(void**)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1));
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  /* >= 2.16: hypre keeps the diag block in local column numbering, copy as-is */
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  /* older hypre expects global column indices in the diag block */
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  PetscCall(MatSeqAIJGetArrayRead(pA->A,&pa));
  PetscCall(PetscArraycpy(hdiag->data,pa,pdiag->nz));
  PetscCall(MatSeqAIJRestoreArrayRead(pA->A,&pa));
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1));
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  /* >= 2.16: global offd indices live in the separate big_j array */
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,hoffd);
  jj  = (PetscInt*) hoffd->big_j;
#else
  jj  = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  /* garray maps compressed local offd column index -> global column index */
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

  PetscCall(MatSeqAIJGetArrayRead(pA->B,&pa));
  PetscCall(PetscArraycpy(hoffd->data,pa,poffd->nz));
  PetscCall(MatSeqAIJRestoreArrayRead(pA->B,&pa));

  /* tell hypre the matrix is fully set: no auxiliary assembly structure needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
272 
/*
   MatConvert_HYPRE_IS - converts a MATHYPRE matrix into a MATIS matrix.

   The hypre diag and offd CSR blocks are merged row-by-row into one local
   SeqAIJ matrix whose columns are numbered [0,dc) for diag entries and
   [dc,dc+oc) for offd entries; the column local-to-global map is built from
   the diag column range plus hypre's col_map_offd.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,mhA->ij,&type);
  PetscCheckFalse(type != HYPRE_PARCSR,comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,mhA->ij,(void**)&hA);
  /* pull sizes, ranges, and raw CSR pointers out of the hypre ParCSR matrix */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total (diag + offd) nonzeros */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm,dr,str,1,&is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is,&rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscCall(PetscMalloc1(dc+oc,&aux));
    /* local columns [0,dc) are the owned range starting at stc, [dc,dc+oc) come from col_map_offd */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is,&cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm,B));
    PetscCall(MatSetSizes(*B,dr,dc,M,N));
    PetscCall(MatSetType(*B,MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B,rl2g,cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr+1,&iptr));
    PetscCall(PetscMalloc1(nnz,&jptr));
    PetscCall(PetscMalloc1(nnz,&data));
  } else {
    /* reuse: write into the existing local matrix's CSR arrays after validating sizes */
    PetscInt  nr;
    PetscBool done;
    PetscCall(MatISGetLocalMat(*B,&lA));
    PetscCall(MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done));
    PetscCheckFalse(nr != dr,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT,nr,dr);
    PetscCheckFalse(iptr[nr] < nnz,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT,iptr[nr],nnz);
    PetscCall(MatSeqAIJGetArray(lA,&data));
  }
  /* merge local matrices: walk diag and offd CSR in lockstep, one output row at a time */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = (PetscScalar*)aa;
    PetscInt    *jold = jj,nc = jd+jo;
    /* diag entries keep their local column index, offd entries are shifted by dc */
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    /* keep each row's column indices sorted, permuting values alongside */
    PetscCall(PetscSortIntWithScalarArray(jd+jo-nc,jold,aold));
  }
  /* pad row pointers for any trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA));
    PetscCall(MatISSetLocalMat(*B,lA));
    /* hack SeqAIJ: transfer ownership of the arrays we just malloc'd to lA */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    PetscCall(MatDestroy(&lA));
  }
  PetscCall(MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A,B));
  }
  PetscFunctionReturn(0);
}
379 
/*
   MatConvert_AIJ_HYPRE - converts an AIJ (or BAIJ, via MatHYPRE_CreateFromMat)
   matrix into a MATHYPRE matrix.

   For MAT_REUSE_MATRIX the existing hypre IJ matrix is destroyed and rebuilt
   from scratch (see the comment below); for MAT_INPLACE_MATRIX the new matrix
   replaces A's header.
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,hB->ij);
  } else {
    PetscCall(MatCreate(comm,&M));
    PetscCall(MatSetType(M,MATHYPRE));
    PetscCall(MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N));
    hB   = (Mat_HYPRE*)(M->data);
    /* for MAT_INPLACE_MATRIX, M stays local and replaces A's header below */
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  PetscCall(MatSetOption(*B,MAT_SORTED_FULL,PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
  PetscCall(MatSetOption(*B,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE));
  /* create the IJ matrix with A's layout, then copy A's values into it */
  PetscCall(MatHYPRE_CreateFromMat(A,hB));
  PetscCall(MatHYPRE_IJMatrixCopy(A,hB->ij));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A,&M));
  }
  (*B)->preallocated = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
414 
/*
   MatConvert_HYPRE_AIJ - converts a MATHYPRE matrix into (Seq/MPI)AIJ.

   The CSR arrays of the hypre diag/offd blocks are either copied
   (MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX) or stolen from hypre
   (MAT_INPLACE_MATRIX, when the integer sizes match); in the in-place case the
   stolen hypre arrays must later be freed with hypre_TFree, which is arranged
   through PetscContainers composed on the result (see end of function).
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  /* when PetscInt and HYPRE_Int have the same size, index arrays can be shared/copied directly */
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,hA->ij,&type);
  PetscCheckFalse(type != HYPRE_PARCSR,comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij));
    PetscCheckFalse(!ismpiaij && !isseqaij,comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* this routine reads the hypre arrays on the host */
  PetscCheckFalse(HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij),comm,PETSC_ERR_SUP,"Not yet implemented");
#endif
  PetscCallMPI(MPI_Comm_size(comm,&size));

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain diag CSR arrays: fresh allocation, reuse of *B's arrays, or steal from hypre */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(PetscMalloc1(m+1,&dii));
    PetscCall(PetscMalloc1(dnnz,&djj));
    PetscCall(PetscMalloc1(dnnz,&da));
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      PetscCall(MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done));
      PetscCheckFalse(nr != m,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %" PetscInt_FMT " != %" PetscInt_FMT,nr,m);
      PetscCheckFalse(dii[nr] < dnnz,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT,dii[nr],dnnz);
      PetscCall(MatSeqAIJGetArray(b->A,&da));
    } else {
      PetscCall(MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done));
      PetscCheckFalse(nr != m,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %" PetscInt_FMT " != %" PetscInt_FMT,nr,m);
      PetscCheckFalse(dii[nr] < dnnz,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT,dii[nr],dnnz);
      PetscCall(MatSeqAIJGetArray(*B,&da));
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      /* integer widths differ: need converted copies of the index arrays */
      PetscCall(PetscMalloc1(m+1,&dii));
      PetscCall(PetscMalloc1(dnnz,&djj));
    } else {
      /* share hypre's index arrays; ownership is transferred further below */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  if (!sameint) {
    if (reuse != MAT_REUSE_MATRIX) { for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]); }
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    if (reuse != MAT_REUSE_MATRIX) PetscCall(PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1));
    PetscCall(PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz));
  }
  PetscCall(PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz));
  /* AIJ requires sorted column indices in each row; sort values along */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    PetscCall(PetscSortIntWithScalarArray(nc,iptr,aptr));
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* obtain offd CSR arrays, same three-way logic as for the diag block */
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(PetscMalloc1(m+1,&oii));
      PetscCall(PetscMalloc1(onnz,&ojj));
      PetscCall(PetscMalloc1(onnz,&oa));
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      PetscCall(MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done));
      PetscCheckFalse(nr != hr,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %" PetscInt_FMT " != %" PetscInt_FMT,nr,hr);
      PetscCheckFalse(oii[nr] < onnz,PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT,oii[nr],onnz);
      PetscCall(MatSeqAIJGetArray(b->B,&oa));
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        PetscCall(PetscMalloc1(m+1,&oii));
        PetscCall(PetscMalloc1(onnz,&ojj));
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (reuse != MAT_REUSE_MATRIX) {
      if (!sameint) {
        for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
      } else {
        PetscCall(PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1));
      }
    }
    PetscCall(PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz));

    /* translate offd column indices through hypre's col_map_offd to global numbering */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    /* we only need the permutation to be computed properly, I don't know if HYPRE
       messes up with the ordering. Just in case, allocate some memory and free it
       later */
    if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   mnz;

      /* reuse: sort against a scratch row of global indices so *B's own ojj is untouched */
      PetscCall(MatSeqAIJGetMaxRowNonzeros(b->B,&mnz));
      PetscCall(PetscMalloc1(mnz,&ojj));
    } else for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       if (reuse == MAT_REUSE_MATRIX) {
         PetscInt j;

         iptr = ojj;
         for (j=0; j<nc; j++) iptr[j] = coffd[offdj[oii[i] + j]];
       }
       PetscCall(PetscSortIntWithScalarArray(nc,iptr,aptr));
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_REUSE_MATRIX) PetscCall(PetscFree(ojj));
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      PetscCall(MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B));
      /* hack MPIAIJ: transfer ownership of the malloc'd arrays to *B */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      PetscCall(MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T));
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      PetscCall(MatHeaderReplace(A,&T));
    }
  } else {
    /* sequential: no off-diagonal block */
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      PetscCall(MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B));
      /* hack SeqAIJ: transfer ownership of the malloc'd arrays to *B */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      PetscCall(MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T));
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      PetscCall(MatHeaderReplace(A,&T));
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt nh;
    void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when !sameint only the data arrays (da,oa) were stolen from hypre; the
       converted index arrays are PETSc allocations freed by the AIJ matrix */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      /* attach each stolen pointer to *B so hypre_array_destroy frees it with hypre_TFree */
      PetscCall(PetscContainerCreate(comm,&c));
      PetscCall(PetscContainerSetPointer(c,ptrs[i]));
      PetscCall(PetscContainerSetUserDestroy(c,hypre_array_destroy));
      PetscCall(PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c));
      PetscCall(PetscContainerDestroy(&c));
    }
  }
  PetscFunctionReturn(0);
}
647 
/*
   MatAIJGetParCSR_Private - wraps a (Seq/MPI)AIJ matrix in a temporary hypre
   ParCSR matrix without copying the data arrays: hypre's CSR blocks point at
   (or are integer-converted copies of) PETSc's arrays, and data ownership is
   left with PETSc (hypre_CSRMatrixSetDataOwner 0).

   The caller must restore with MatAIJRestoreParCSR_Private().

   NOTE(review): the odd-looking `} else {` / `{` pairs below are intentional:
   the open brace is supplied by either the #if branch (CUDA path present) or
   the #else branch (CUDA path absent), so braces balance in both
   configurations -- do not "fix" them.
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  /* when PetscInt and HYPRE_Int have the same size the index arrays can be aliased */
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL,*hdj = NULL,*hoi = NULL,*hoj = NULL;
  PetscInt           *pdi = NULL,*pdj = NULL,*poi = NULL,*poj = NULL;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool          iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij));
  PetscCheckFalse(!ismpiaij && !isseqaij,comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag = (Mat_SeqAIJ*)a->A->data;
    offd = (Mat_SeqAIJ*)a->B->data;
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE) && defined(HYPRE_USING_CUDA)
    PetscCall(PetscObjectTypeCompare((PetscObject)A,MATMPIAIJCUSPARSE,&iscuda));
    if (iscuda && !A->boundtocpu) {
      /* device path: cuSPARSE ij arrays are already HYPRE_Int-sized */
      sameint = PETSC_TRUE;
      PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A,PETSC_FALSE,(const HYPRE_Int**)&hdi,(const HYPRE_Int**)&hdj));
      PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B,PETSC_FALSE,(const HYPRE_Int**)&hoi,(const HYPRE_Int**)&hoj));
    } else {
#else
    {
#endif
      pdi = diag->i;
      pdj = diag->j;
      poi = offd->i;
      poj = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int*)pdi;
        hdj = (HYPRE_Int*)pdj;
        hoi = (HYPRE_Int*)poi;
        hoj = (HYPRE_Int*)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    /* sequential: the whole matrix is the diagonal block, no offd */
    diag = (Mat_SeqAIJ*)A->data;
    offd = NULL;
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE)
    PetscCall(PetscObjectTypeCompare((PetscObject)A,MATSEQAIJCUSPARSE,&iscuda));
    if (iscuda && !A->boundtocpu) {
      sameint = PETSC_TRUE;
      PetscCall(MatSeqAIJCUSPARSEGetIJ(A,PETSC_FALSE,(const HYPRE_Int**)&hdi,(const HYPRE_Int**)&hdj));
    } else {
#else
    {
#endif
      pdi = diag->i;
      pdj = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int*)pdi;
        hdj = (HYPRE_Int*)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* assumed partition: hypre only wants this rank's [start,end) pair */
    PetscCallMPI(MPI_Comm_rank(comm,&myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* the starts arrays belong to PETSc's layouts: hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscCall(PetscMalloc2(A->rmap->n+1,&hdi,dnnz,&hdj));
    for (i = 0; i < A->rmap->n+1; i++) hdi[i] = (HYPRE_Int)(pdi[i]);
    for (i = 0; i < dnnz; i++)         hdj[i] = (HYPRE_Int)(pdj[i]);
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscCall(PetscMalloc2(A->rmap->n+1,&hoi,onnz,&hoj));
      for (i = 0; i < A->rmap->n+1; i++) hoi[i] = (HYPRE_Int)(poi[i]);
      for (i = 0; i < onnz; i++)         hoj[i] = (HYPRE_Int)(poj[i]);
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackCallStandard(hypre_ParCSRMatrixInitialize_v2,tA,iscuda ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
#else
#if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
  PetscStackCallStandard(hypre_ParCSRMatrixInitialize,tA);
#else
  PetscStackCallStandard(hypre_ParCSRMatrixInitialize_v2,tA,HYPRE_MEMORY_HOST);
#endif
#endif
  /* drop the col map Initialize allocated and alias PETSc's garray instead */
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA),HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscStackCallStandard(hypre_MatvecCommPkgCreate,tA);
  *hA = tA;
  PetscFunctionReturn(0);
}
784 
/* Undoes MatAIJGetParCSR_Private: frees the HYPRE_Int index copies PETSc
   allocated when PetscInt and HYPRE_Int differ in size, detaches all
   PETSc-owned CSR pointers from the temporary ParCSR so hypre does not
   free memory it does not own, then destroys the wrapper and zeroes *hA. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       ismpiaij,sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool       iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A,&iscuda,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,""));
  if (iscuda) sameint = PETSC_TRUE; /* CUSPARSE path always hands out HYPRE_Int-sized indices, so nothing was copied */
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi,hj));
    if (ismpiaij) { /* the off-diagonal block exists only in the MPI case */
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi,hj));
    }
  }
  /* the data arrays always alias the AIJ matrix storage; never let hypre free them */
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  /* col map aliased a->garray (set in MatAIJGetParCSR_Private); detach it too */
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}
828 
/* calls RAP from BoomerAMG: computes *hRAP = hR^T * hA * hP
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* remember ownership flags so we can restore them after the RAP call below */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPush("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR,hA,hP);
  PetscStackPop;
#else
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,hR,hA,hP,hRAP);
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,*hRAP);
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
#endif
  PetscFunctionReturn(0);
}
861 
862 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
863 {
864   Mat                B;
865   hypre_ParCSRMatrix *hA,*hP,*hPtAP = NULL;
866   Mat_Product        *product=C->product;
867 
868   PetscFunctionBegin;
869   PetscCall(MatAIJGetParCSR_Private(A,&hA));
870   PetscCall(MatAIJGetParCSR_Private(P,&hP));
871   PetscCall(MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP));
872   PetscCall(MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B));
873 
874   PetscCall(MatHeaderMerge(C,&B));
875   C->product = product;
876 
877   PetscCall(MatAIJRestoreParCSR_Private(A,&hA));
878   PetscCall(MatAIJRestoreParCSR_Private(P,&hP));
879   PetscFunctionReturn(0);
880 }
881 
/* Symbolic phase of C = P^T * A * P with AIJ inputs computed through hypre:
   no symbolic analysis is performed (hypre does it internally in the numeric
   phase); we only set C's type and install the numeric callbacks.
   The fill estimate is unused. */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C,MATAIJ));
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}
890 
891 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
892 {
893   Mat                B;
894   Mat_HYPRE          *hP;
895   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr = NULL;
896   HYPRE_Int          type;
897   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
898   PetscBool          ishypre;
899 
900   PetscFunctionBegin;
901   PetscCall(PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre));
902   PetscCheck(ishypre,comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
903   hP = (Mat_HYPRE*)P->data;
904   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,hP->ij,&type);
905   PetscCheckFalse(type != HYPRE_PARCSR,comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
906   PetscStackCallStandard(HYPRE_IJMatrixGetObject,hP->ij,(void**)&Pparcsr);
907 
908   PetscCall(MatAIJGetParCSR_Private(A,&hA));
909   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr));
910   PetscCall(MatAIJRestoreParCSR_Private(A,&hA));
911 
912   /* create temporary matrix and merge to C */
913   PetscCall(MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B));
914   PetscCall(MatHeaderMerge(C,&B));
915   PetscFunctionReturn(0);
916 }
917 
918 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
919 {
920   Mat                B;
921   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr = NULL;
922   Mat_HYPRE          *hA,*hP;
923   PetscBool          ishypre;
924   HYPRE_Int          type;
925 
926   PetscFunctionBegin;
927   PetscCall(PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre));
928   PetscCheck(ishypre,PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
929   PetscCall(PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre));
930   PetscCheck(ishypre,PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
931   hA = (Mat_HYPRE*)A->data;
932   hP = (Mat_HYPRE*)P->data;
933   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,hA->ij,&type);
934   PetscCheckFalse(type != HYPRE_PARCSR,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
935   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,hP->ij,&type);
936   PetscCheckFalse(type != HYPRE_PARCSR,PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
937   PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)&Aparcsr);
938   PetscStackCallStandard(HYPRE_IJMatrixGetObject,hP->ij,(void**)&Pparcsr);
939   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr));
940   PetscCall(MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B));
941   PetscCall(MatHeaderMerge(C,&B));
942   PetscFunctionReturn(0);
943 }
944 
/* Computes *hAB = hA * hB.
   calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPush("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA,hB); /* device-capable kernel */
#else
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB); /* host-only kernel */
#endif
  PetscStackPop;
  PetscFunctionReturn(0);
}
963 
964 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
965 {
966   Mat                D;
967   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
968   Mat_Product        *product=C->product;
969 
970   PetscFunctionBegin;
971   PetscCall(MatAIJGetParCSR_Private(A,&hA));
972   PetscCall(MatAIJGetParCSR_Private(B,&hB));
973   PetscCall(MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB));
974   PetscCall(MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D));
975 
976   PetscCall(MatHeaderMerge(C,&D));
977   C->product = product;
978 
979   PetscCall(MatAIJRestoreParCSR_Private(A,&hA));
980   PetscCall(MatAIJRestoreParCSR_Private(B,&hB));
981   PetscFunctionReturn(0);
982 }
983 
/* Symbolic phase of C = A * B with AIJ inputs computed through hypre:
   no symbolic analysis is done here (hypre performs it in the numeric
   phase); we only set C's type and install the numeric callbacks.
   The fill estimate is unused. */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C,MATAIJ));
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}
992 
993 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
994 {
995   Mat                D;
996   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
997   Mat_HYPRE          *hA,*hB;
998   PetscBool          ishypre;
999   HYPRE_Int          type;
1000   Mat_Product        *product;
1001 
1002   PetscFunctionBegin;
1003   PetscCall(PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre));
1004   PetscCheck(ishypre,PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
1005   PetscCall(PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre));
1006   PetscCheck(ishypre,PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
1007   hA = (Mat_HYPRE*)A->data;
1008   hB = (Mat_HYPRE*)B->data;
1009   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,hA->ij,&type);
1010   PetscCheckFalse(type != HYPRE_PARCSR,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
1011   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,hB->ij,&type);
1012   PetscCheckFalse(type != HYPRE_PARCSR,PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
1013   PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)&Aparcsr);
1014   PetscStackCallStandard(HYPRE_IJMatrixGetObject,hB->ij,(void**)&Bparcsr);
1015   PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr));
1016   PetscCall(MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D));
1017 
1018   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
1019   product    = C->product;  /* save it from MatHeaderReplace() */
1020   C->product = NULL;
1021   PetscCall(MatHeaderReplace(C,&D));
1022   C->product = product;
1023   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
1024   C->ops->productnumeric = MatProductNumeric_AB;
1025   PetscFunctionReturn(0);
1026 }
1027 
1028 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
1029 {
1030   Mat                E;
1031   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC = NULL;
1032 
1033   PetscFunctionBegin;
1034   PetscCall(MatAIJGetParCSR_Private(A,&hA));
1035   PetscCall(MatAIJGetParCSR_Private(B,&hB));
1036   PetscCall(MatAIJGetParCSR_Private(C,&hC));
1037   PetscCall(MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC));
1038   PetscCall(MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E));
1039   PetscCall(MatHeaderMerge(D,&E));
1040   PetscCall(MatAIJRestoreParCSR_Private(A,&hA));
1041   PetscCall(MatAIJRestoreParCSR_Private(B,&hB));
1042   PetscCall(MatAIJRestoreParCSR_Private(C,&hC));
1043   PetscFunctionReturn(0);
1044 }
1045 
/* Symbolic phase for D = A^T * B * C through hypre: only sets D's type.
   The fill estimate is unused; the numeric routine is invoked separately.
   NOTE(review): unlike the other *_wHYPRE symbolic routines, no numeric
   callback is installed here — presumably set by the caller; confirm. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D,MATAIJ));
  PetscFunctionReturn(0);
}
1052 
1053 /* ---------------------------------------------------- */
1054 static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
1055 {
1056   PetscFunctionBegin;
1057   C->ops->productnumeric = MatProductNumeric_AB;
1058   PetscFunctionReturn(0);
1059 }
1060 
1061 static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1062 {
1063   Mat_Product    *product = C->product;
1064   PetscBool      Ahypre;
1065 
1066   PetscFunctionBegin;
1067   PetscCall(PetscObjectTypeCompare((PetscObject)product->A,MATHYPRE,&Ahypre));
1068   if (Ahypre) { /* A is a Hypre matrix */
1069     PetscCall(MatSetType(C,MATHYPRE));
1070     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1071     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1072     PetscFunctionReturn(0);
1073   }
1074   PetscFunctionReturn(0);
1075 }
1076 
/* Symbolic phase of the PtAP product for MATHYPRE: nothing to precompute,
   just install the generic numeric driver. */
static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}
1083 
/* Configures C = P^T * A * P when P (and possibly A) is MATHYPRE.
   If A is MATHYPRE, C is MATHYPRE and the fully-hypre path is used.
   Otherwise (A is AIJ) a runtime option selects whether C comes back
   as AIJ or MATHYPRE. */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product    *product = C->product;
  PetscBool      flg;
  PetscInt       type = 0; /* index into outTypes[]; default "aij" */
  const char     *outTypes[4] = {"aij","seqaij","mpiaij","hypre"};
  PetscInt       ntype = 4;
  Mat            A = product->A;
  PetscBool      Ahypre;
  PetscErrorCode ierr; /* needed because PetscOptionsBegin/End expand to statements returning ierr */

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C,MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatPtAP_HYPRE","Mat");PetscCall(ierr);
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype","MatPtAP outtype","MatPtAP outtype",outTypes,ntype,outTypes[type],&type,&flg));
    ierr = PetscOptionsEnd();PetscCall(ierr);
  } else {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatProduct_PtAP_HYPRE","Mat");PetscCall(ierr);
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype","MatProduct_PtAP outtype","MatProduct_PtAP",outTypes,ntype,outTypes[type],&type,&flg));
    ierr = PetscOptionsEnd();PetscCall(ierr);
  }

  /* "aij"/"seqaij"/"mpiaij" all map to MATAIJ; "hypre" keeps the result in hypre format */
  if (type == 0 || type == 1 || type == 2) {
    PetscCall(MatSetType(C,MATAIJ));
  } else if (type == 3) {
    PetscCall(MatSetType(C,MATHYPRE));
  } else SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(0);
}
1125 
1126 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1127 {
1128   Mat_Product    *product = C->product;
1129 
1130   PetscFunctionBegin;
1131   switch (product->type) {
1132   case MATPRODUCT_AB:
1133     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1134     break;
1135   case MATPRODUCT_PtAP:
1136     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1137     break;
1138   default:
1139     break;
1140   }
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 /* -------------------------------------------------------- */
1145 
1146 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1147 {
1148   PetscFunctionBegin;
1149   PetscCall(MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE));
1150   PetscFunctionReturn(0);
1151 }
1152 
/* y = A x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE));
  PetscFunctionReturn(0);
}
1159 
1160 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1161 {
1162   PetscFunctionBegin;
1163   if (y != z) {
1164     PetscCall(VecCopy(y,z));
1165   }
1166   PetscCall(MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE));
1167   PetscFunctionReturn(0);
1168 }
1169 
1170 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1171 {
1172   PetscFunctionBegin;
1173   if (y != z) {
1174     PetscCall(VecCopy(y,z));
1175   }
1176   PetscCall(MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE));
1177   PetscFunctionReturn(0);
1178 }
1179 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans
   Temporarily pushes the PETSc vectors' arrays into the matrix's cached
   hypre IJVectors (hA->x sized by cmap, hA->b by rmap), calls the hypre
   matvec, then pops them back. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;

  PetscFunctionBegin;
  if (trans) {
    /* transpose: input x has row layout (hA->b), output y has column layout (hA->x) */
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b,x));
    /* when b == 0 the output is write-only: no need to read y's current values */
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x,y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x,y));
    PetscStackCallStandard(HYPRE_IJVectorGetObject,hA->b->ij,(void**)&hx);
    PetscStackCallStandard(HYPRE_IJVectorGetObject,hA->x->ij,(void**)&hy);
  } else {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x,x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b,y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b,y));
    PetscStackCallStandard(HYPRE_IJVectorGetObject,hA->x->ij,(void**)&hx);
    PetscStackCallStandard(HYPRE_IJVectorGetObject,hA->b->ij,(void**)&hy);
  }
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
  if (trans) {
    PetscStackCallStandard(hypre_ParCSRMatrixMatvecT,a,parcsr,hx,b,hy);
  } else {
    PetscStackCallStandard(hypre_ParCSRMatrixMatvec,a,parcsr,hx,b,hy);
  }
  /* both IJVectors were pushed in either branch; pop both */
  PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
  PetscFunctionReturn(0);
}
1211 
/* Destroys a MATHYPRE matrix: releases the cached hypre work vectors, the
   IJ matrix, the duplicated communicator, the assembly stash, the scratch
   buffer used by MatSetValues_HYPRE, and the composed methods. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
  if (hA->ij) {
    /* when the inner ParCSR is not ours, detach it so HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,hA->ij);
  }
  if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A),&hA->comm));

  PetscCall(MatStashDestroy_Private(&A->stash));

  PetscCall(PetscFree(hA->array));

  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_seqaij_hypre_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_mpiaij_hypre_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(0);
}
1238 
/* Default setup: preallocate with default (PETSC_DEFAULT) row sizes */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL));
  PetscFunctionReturn(0);
}
1245 
1246 //TODO FIX hypre_CSRMatrixMatvecOutOfPlace
#if defined(PETSC_HAVE_HYPRE_DEVICE)
/* Binds the matrix (and its cached work vectors) to host or device memory.
   An assembled IJ matrix is migrated only when its current memory location
   differs from the requested one. */
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE            *hA = (Mat_HYPRE*)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;

  PetscFunctionBegin;
  A->boundtocpu = bind;
  if (hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
    PetscStackCallStandard(hypre_ParCSRMatrixMigrate,parcsr, hmem);
  }
  if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x,bind));
  if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b,bind));
  PetscFunctionReturn(0);
}
#endif
1265 
/* Finalizes assembly: drains the off-process stash into the IJ matrix,
   calls the hypre assembly routine, recreates the auxiliary matrix the
   assembly destroyed (needed for future MatSetValues), and ensures the
   communication package and cached work vectors exist. */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;

  PetscFunctionBegin;
  PetscCheckFalse(mode == MAT_FLUSH_ASSEMBLY,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* receive and insert values stashed for this process by other ranks */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg));
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode));

        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&A->stash));
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,hA->ij);
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,hA->ij);
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
      PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,aux_matrix);
#else
      PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize_v2,aux_matrix,HYPRE_MEMORY_HOST);
#endif
    }
  }
  {
    hypre_ParCSRMatrix *parcsr;

    /* make sure the matvec communication package exists for subsequent MatMults */
    PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscStackCallStandard(hypre_MatvecCommPkgCreate,parcsr);
  }
  if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap,&hA->x));
  if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap,&hA->b));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(MatBindToCPU_HYPRE(A,A->boundtocpu));
#endif
  PetscFunctionReturn(0);
}
1334 
1335 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1336 {
1337   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1338 
1339   PetscFunctionBegin;
1340   PetscCheck(hA->available,PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1341 
1342   if (hA->size >= size) {
1343     *array = hA->array;
1344   } else {
1345     PetscCall(PetscFree(hA->array));
1346     hA->size = size;
1347     PetscCall(PetscMalloc(hA->size,&hA->array));
1348     *array = hA->array;
1349   }
1350 
1351   hA->available = PETSC_FALSE;
1352   PetscFunctionReturn(0);
1353 }
1354 
1355 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1356 {
1357   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1358 
1359   PetscFunctionBegin;
1360   *array = NULL;
1361   hA->available = PETSC_TRUE;
1362   PetscFunctionReturn(0);
1363 }
1364 
/* MatSetValues for MATHYPRE: inserts/adds an nr x nc dense block of values.
   Negative row/column indices are skipped (PETSc convention). A single
   scratch buffer holds the compacted column indices, their original
   positions, and the values converted to HYPRE_Complex. For INSERT_VALUES,
   rows owned by other processes are stashed and communicated at assembly. */
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  HYPRE_Complex  *sscr;    /* values converted to hypre's scalar type */
  PetscInt       *cscr[2]; /* [0]: compacted column indices, [1]: their original positions in cols[] */
  PetscInt       i,nzc;
  void           *array = NULL;

  PetscFunctionBegin;
  /* single scratch allocation: 2*nc indices + nc*nr hypre scalars */
  PetscCall(MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array));
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) { /* drop negative column indices */
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) { /* nothing to insert */
    PetscCall(MatRestoreArray_HYPRE(A,&array));
    PetscFunctionReturn(0);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
    PetscStackCallStandard(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
  }
#endif

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) { /* drop negative row indices */
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        PetscCheckFalse((PetscInt)hnc != nzc,PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT,nzc,rows[i]);
        for (j=0;j<nzc;j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]));
        /* hypre handles off-process additions itself in ADD_VALUES mode */
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr);
      }
      vals += nc; /* advance to the next row of the dense input block */
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    PetscCall(MatGetOwnershipRange(A,&rst,&ren));
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        PetscCheckFalse((PetscInt)hnc != nzc,PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT,nzc,rows[i]);
        for (j=0;j<nzc;j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]));
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) PetscCall(MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE));
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr);
      }
      vals += nc;
    }
  }

  PetscCall(MatRestoreArray_HYPRE(A,&array));
  PetscFunctionReturn(0);
}
1434 
/* Implementation of MatHYPRESetPreallocation: creates (or validates) the
   underlying HYPRE IJ matrix and preallocates diagonal/off-diagonal row
   sizes. NOTE: when PetscInt and HYPRE_Int differ, the (HYPRE_Int*) casts
   of dnnz/onnz below rely on them having the same size — TODO confirm a
   configure-time check enforces this. */
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rs   = A->rmap->rstart;
  re   = A->rmap->rend;
  cs   = A->cmap->rstart;
  ce   = A->cmap->rend;
  if (!hA->ij) {
    /* hypre takes inclusive upper bounds, hence the -1 */
    PetscStackCallStandard(HYPRE_IJMatrixCreate,hA->comm,rs,re-1,cs,ce-1,&hA->ij);
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,hA->ij,HYPRE_PARCSR);
  } else {
    /* an IJ matrix already exists: verify its local layout matches PETSc's */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,hA->ij,&hrs,&hre,&hcs,&hce);
    PetscCheckFalse(hre-hrs+1 != re -rs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%" PetscInt_FMT ",%" PetscInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")",hrs,hre+1,rs,re);
    PetscCheckFalse(hce-hcs+1 != ce -cs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%" PetscInt_FMT ",%" PetscInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")",hcs,hce+1,cs,ce);
  }
  PetscCall(MatGetBlockSize(A,&bs));
  /* default: 10 nonzeros per row per block, matching AIJ's default heuristic */
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    /* expand the scalar count into a per-row array as hypre requires */
    PetscCall(PetscMalloc1(A->rmap->n,&hdnnz));
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      PetscCall(PetscMalloc1(A->rmap->n,&honnz));
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else honnz = (HYPRE_Int*)onnz;
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,hA->ij,hdnnz,honnz);
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    /* sequential: no off-diagonal block */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,hA->ij,hdnnz);
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,hA->ij);
#else
  PetscStackCallStandard(HYPRE_IJMatrixInitialize_v2,hA->ij,HYPRE_MEMORY_HOST);
#endif
  if (!dnnz) {
    PetscCall(PetscFree(hdnnz));
  }
  if (!onnz && honnz) {
    PetscCall(PetscFree(honnz));
  }
  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1511 
/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   Notes:
    If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the type-specific implementation, a no-op for non-MATHYPRE types */
  PetscCall(PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz)));
  PetscFunctionReturn(0);
}
1550 
1551 /*
1552    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1553 
1554    Collective
1555 
1556    Input Parameters:
1557 +  parcsr   - the pointer to the hypre_ParCSRMatrix
1558 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1559 -  copymode - PETSc copying options
1560 
1561    Output Parameter:
1562 .  A  - the matrix
1563 
1564    Level: intermediate
1565 
.seealso: MATHYPRE, PetscCopyMode
1567 */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;

  PetscFunctionBegin;
  comm  = hypre_ParCSRMatrixComm(parcsr);
  /* decode the requested output type; any AIJ flavor is handled through MATAIJ below */
  PetscCall(PetscStrcmp(mtype,MATSEQAIJ,&isseqaij));
  PetscCall(PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl));
  PetscCall(PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij));
  PetscCall(PetscStrcmp(mtype,MATAIJ,&isaij));
  PetscCall(PetscStrcmp(mtype,MATHYPRE,&ishyp));
  PetscCall(PetscStrcmp(mtype,MATIS,&isis));
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  PetscCheckFalse(!isaij && !ishyp && !isis,comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention */
  /* hypre ranges are inclusive; convert to PETSc's exclusive upper bound */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  PetscCall(MatCreate(comm,&T));
  PetscCall(MatSetSizes(T,rend-rstart,cend-cstart,M,N));
  PetscCall(MatSetType(T,MATHYPRE));
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij);
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,hA->ij,HYPRE_PARCSR);

// TODO DEV
  /* create new ParCSR object if needed */
  /* PETSC_COPY_VALUES for the MATHYPRE target means we clone the ParCSR and own the clone */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    /* old hypre clones only the structure; copy the numerical values by hand */
    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag)));
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd)));
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr,1);
#endif
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,hA->ij);
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      PetscCallMPI(MPI_Comm_rank(comm,&myid));
    }
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    /* point hypre at the PETSc layout ranges instead of allocating its own */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T,NULL,&map));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T,&map,NULL));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    PetscCall(MatSetOption(*A,MAT_SORTED_FULL,PETSC_TRUE));
    PetscCall(MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY));
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      PetscCall(MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A));
      PetscCall(MatDestroy(&T));
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      PetscCall(MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T));
      *A   = T;
    }
  } else if (isis) {
    PetscCall(MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A));
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    PetscCall(MatDestroy(&T));
  }
  PetscFunctionReturn(0);
}
1691 
1692 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1693 {
1694   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1695   HYPRE_Int type;
1696 
1697   PetscFunctionBegin;
1698   PetscCheck(hA->ij,PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1699   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,hA->ij,&type);
1700   PetscCheckFalse(type != HYPRE_PARCSR,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1701   PetscStackCallStandard(HYPRE_IJMatrixGetObject,hA->ij,(void**)parcsr);
1702   PetscFunctionReturn(0);
1703 }
1704 
1705 /*
1706    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1707 
1708    Not collective
1709 
1710    Input Parameters:
1711 +  A  - the MATHYPRE object
1712 
1713    Output Parameter:
1714 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1715 
1716    Level: intermediate
1717 
1718 .seealso: MatHYPRE, PetscCopyMode
1719 */
1720 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1721 {
1722   PetscFunctionBegin;
1723   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1724   PetscValidType(A,1);
1725   PetscCall(PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr)));
1726   PetscFunctionReturn(0);
1727 }
1728 
1729 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1730 {
1731   hypre_ParCSRMatrix *parcsr;
1732   hypre_CSRMatrix    *ha;
1733   PetscInt           rst;
1734 
1735   PetscFunctionBegin;
1736   PetscCheckFalse(A->rmap->n != A->cmap->n,PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1737   PetscCall(MatGetOwnershipRange(A,&rst,NULL));
1738   PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
1739   if (missing) *missing = PETSC_FALSE;
1740   if (dd) *dd = -1;
1741   ha = hypre_ParCSRMatrixDiag(parcsr);
1742   if (ha) {
1743     PetscInt  size,i;
1744     HYPRE_Int *ii,*jj;
1745 
1746     size = hypre_CSRMatrixNumRows(ha);
1747     ii   = hypre_CSRMatrixI(ha);
1748     jj   = hypre_CSRMatrixJ(ha);
1749     for (i = 0; i < size; i++) {
1750       PetscInt  j;
1751       PetscBool found = PETSC_FALSE;
1752 
1753       for (j = ii[i]; j < ii[i+1] && !found; j++)
1754         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1755 
1756       if (!found) {
1757         PetscInfo(A,"Matrix is missing local diagonal entry %" PetscInt_FMT "\n",i);
1758         if (missing) *missing = PETSC_TRUE;
1759         if (dd) *dd = i+rst;
1760         PetscFunctionReturn(0);
1761       }
1762     }
1763     if (!size) {
1764       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1765       if (missing) *missing = PETSC_TRUE;
1766       if (dd) *dd = rst;
1767     }
1768   } else {
1769     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1770     if (missing) *missing = PETSC_TRUE;
1771     if (dd) *dd = rst;
1772   }
1773   PetscFunctionReturn(0);
1774 }
1775 
/* Scales all stored entries of A by s: A <- s*A */
static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
  hypre_CSRMatrix    *ha;
#endif
  HYPRE_Complex      hs;

  PetscFunctionBegin;
  PetscCall(PetscHYPREScalarCast(s,&hs));
  PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
#if PETSC_PKG_HYPRE_VERSION_GE(2,19,0)
  /* hypre >= 2.19 provides a dedicated scaling routine */
  PetscStackCallStandard(hypre_ParCSRMatrixScale,parcsr,hs);
#else  /* diagonal part */
  /* older hypre: scale the CSR value arrays of both local blocks by hand */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    /* ii[size] is the total number of stored nonzeros in this block */
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
#endif
  PetscFunctionReturn(0);
}
1816 
1817 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1818 {
1819   hypre_ParCSRMatrix *parcsr;
1820   HYPRE_Int          *lrows;
1821   PetscInt           rst,ren,i;
1822 
1823   PetscFunctionBegin;
1824   PetscCheckFalse(x || b,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1825   PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
1826   PetscCall(PetscMalloc1(numRows,&lrows));
1827   PetscCall(MatGetOwnershipRange(A,&rst,&ren));
1828   for (i=0;i<numRows;i++) {
1829     if (rows[i] < rst || rows[i] >= ren)
1830       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1831     lrows[i] = rows[i] - rst;
1832   }
1833   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,parcsr,numRows,lrows);
1834   PetscCall(PetscFree(lrows));
1835   PetscFunctionReturn(0);
1836 }
1837 
1838 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1839 {
1840   PetscFunctionBegin;
1841   if (ha) {
1842     HYPRE_Int     *ii, size;
1843     HYPRE_Complex *a;
1844 
1845     size = hypre_CSRMatrixNumRows(ha);
1846     a    = hypre_CSRMatrixData(ha);
1847     ii   = hypre_CSRMatrixI(ha);
1848 
1849     if (a) PetscCall(PetscArrayzero(a,ii[size]));
1850   }
1851   PetscFunctionReturn(0);
1852 }
1853 
1854 PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1855 {
1856   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1857 
1858   PetscFunctionBegin;
1859   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
1860     PetscStackCallStandard(HYPRE_IJMatrixSetConstantValues,hA->ij,0.0);
1861   } else {
1862     hypre_ParCSRMatrix *parcsr;
1863 
1864     PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
1865     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
1866     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
1867   }
1868   PetscFunctionReturn(0);
1869 }
1870 
1871 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
1872 {
1873   PetscInt        ii;
1874   HYPRE_Int       *i, *j;
1875   HYPRE_Complex   *a;
1876 
1877   PetscFunctionBegin;
1878   if (!hA) PetscFunctionReturn(0);
1879 
1880   i = hypre_CSRMatrixI(hA);
1881   j = hypre_CSRMatrixJ(hA);
1882   a = hypre_CSRMatrixData(hA);
1883 
1884   for (ii = 0; ii < N; ii++) {
1885     HYPRE_Int jj, ibeg, iend, irow;
1886 
1887     irow = rows[ii];
1888     ibeg = i[irow];
1889     iend = i[irow+1];
1890     for (jj = ibeg; jj < iend; jj++)
1891       if (j[jj] == irow) a[jj] = diag;
1892       else a[jj] = 0.0;
1893    }
1894    PetscFunctionReturn(0);
1895 }
1896 
1897 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1898 {
1899   hypre_ParCSRMatrix  *parcsr;
1900   PetscInt            *lrows,len;
1901   HYPRE_Complex       hdiag;
1902 
1903   PetscFunctionBegin;
1904   PetscCheckFalse(x || b,PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
1905   PetscCall(PetscHYPREScalarCast(diag,&hdiag));
1906   /* retrieve the internal matrix */
1907   PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
1908   /* get locally owned rows */
1909   PetscCall(MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows));
1910   /* zero diagonal part */
1911   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag));
1912   /* zero off-diagonal part */
1913   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0));
1914 
1915   PetscCall(PetscFree(lrows));
1916   PetscFunctionReturn(0);
1917 }
1918 
1919 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1920 {
1921   PetscFunctionBegin;
1922   if (mat->nooffprocentries) PetscFunctionReturn(0);
1923 
1924   PetscCall(MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range));
1925   PetscFunctionReturn(0);
1926 }
1927 
1928 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1929 {
1930   hypre_ParCSRMatrix  *parcsr;
1931   HYPRE_Int           hnz;
1932 
1933   PetscFunctionBegin;
1934   /* retrieve the internal matrix */
1935   PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
1936   /* call HYPRE API */
1937   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v);
1938   if (nz) *nz = (PetscInt)hnz;
1939   PetscFunctionReturn(0);
1940 }
1941 
1942 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1943 {
1944   hypre_ParCSRMatrix  *parcsr;
1945   HYPRE_Int           hnz;
1946 
1947   PetscFunctionBegin;
1948   /* retrieve the internal matrix */
1949   PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
1950   /* call HYPRE API */
1951   hnz = nz ? (HYPRE_Int)(*nz) : 0;
1952   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v);
1953   PetscFunctionReturn(0);
1954 }
1955 
1956 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1957 {
1958   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1959   PetscInt  i;
1960 
1961   PetscFunctionBegin;
1962   if (!m || !n) PetscFunctionReturn(0);
1963   /* Ignore negative row indices
1964    * And negative column indices should be automatically ignored in hypre
1965    * */
1966   for (i=0; i<m; i++) {
1967     if (idxm[i] >= 0) {
1968       HYPRE_Int hn = (HYPRE_Int)n;
1969       PetscStackCallStandard(HYPRE_IJMatrixGetValues,hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n));
1970     }
1971   }
1972   PetscFunctionReturn(0);
1973 }
1974 
1975 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1976 {
1977   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1978 
1979   PetscFunctionBegin;
1980   switch (op) {
1981   case MAT_NO_OFF_PROC_ENTRIES:
1982     if (flg) {
1983       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,hA->ij,0);
1984     }
1985     break;
1986   case MAT_SORTED_FULL:
1987     hA->sorted_full = flg;
1988     break;
1989   default:
1990     break;
1991   }
1992   PetscFunctionReturn(0);
1993 }
1994 
/* Views the matrix: non-native formats go through a temporary AIJ wrapper,
   the native format uses hypre's own ASCII file writer */
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  PetscViewerFormat  format;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(view,&format));
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(0);
  if (format != PETSC_VIEWER_NATIVE) {
    Mat                B;
    hypre_ParCSRMatrix *parcsr;
    PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;

    /* wrap the ParCSR data in an AIJ matrix (no copy) and use its viewer */
    PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
    PetscCall(MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B));
    PetscCall(MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview));
    PetscCheck(mview,PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");
    PetscCall((*mview)(B,view));
    PetscCall(MatDestroy(&B));
  } else {
    Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char *filename;

    /* HYPRE uses only text files */
    PetscCall(PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii));
    PetscCheck(isascii,PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
    PetscCall(PetscViewerFileGetName(view,&filename));
    /* hypre writes one file per rank, suffixed with the rank number */
    PetscStackCallStandard(HYPRE_IJMatrixPrint,hA->ij,filename);
    PetscCallMPI(MPI_Comm_size(hA->comm,&size));
    if (size > 1) {
      PetscCall(PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1));
    } else {
      PetscCall(PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0));
    }
  }
  PetscFunctionReturn(0);
}
2033 
2034 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
2035 {
2036   hypre_ParCSRMatrix *parcsr = NULL;
2037   PetscCopyMode      cpmode;
2038 
2039   PetscFunctionBegin;
2040   PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
2041   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2042     parcsr = hypre_ParCSRMatrixClone(parcsr,0);
2043     cpmode = PETSC_OWN_POINTER;
2044   } else {
2045     cpmode = PETSC_COPY_VALUES;
2046   }
2047   PetscCall(MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B));
2048   PetscFunctionReturn(0);
2049 }
2050 
2051 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2052 {
2053   hypre_ParCSRMatrix *acsr,*bcsr;
2054 
2055   PetscFunctionBegin;
2056   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2057     PetscCall(MatHYPREGetParCSR_HYPRE(A,&acsr));
2058     PetscCall(MatHYPREGetParCSR_HYPRE(B,&bcsr));
2059     PetscStackCallStandard(hypre_ParCSRMatrixCopy,acsr,bcsr,1);
2060     PetscCall(MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2061     PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
2062     PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
2063   } else {
2064     PetscCall(MatCopy_Basic(A,B,str));
2065   }
2066   PetscFunctionReturn(0);
2067 }
2068 
/* Extracts the diagonal of A into d. Requires congruent row/column layouts and,
   in debug mode, that no diagonal entry is structurally missing.
   NOTE(review): reads hypre_CSRMatrixData directly with host pointers — presumably
   assumes the data lives in host memory; confirm for device configurations. */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  HYPRE_Complex      *data = NULL;
  HYPRE_Int          *diag = NULL;
  PetscInt           i;
  PetscBool          cong;

  PetscFunctionBegin;
  PetscCall(MatHasCongruentLayouts(A,&cong));
  PetscCheck(cong,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
  if (PetscDefined(USE_DEBUG)) {
    PetscBool miss;
    PetscCall(MatMissingDiagonal(A,&miss,NULL));
    PetscCheckFalse(miss && A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
  PetscCall(MatHYPREGetParCSR_HYPRE(A,&parcsr));
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
    PetscCall(VecGetArray(d,(PetscScalar**)&a));
    diag = hypre_CSRMatrixI(dmat);
    data = hypre_CSRMatrixData(dmat);
    /* hypre stores the diagonal entry first in each row of the diagonal block */
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    PetscCall(VecRestoreArray(d,(PetscScalar**)&a));
  }
  PetscFunctionReturn(0);
}
2099 
2100 #include <petscblaslapack.h>
2101 
2102 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
2103 {
2104   PetscFunctionBegin;
2105 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2106   {
2107     Mat                B;
2108     hypre_ParCSRMatrix *x,*y,*z;
2109 
2110     PetscCall(MatHYPREGetParCSR(Y,&y));
2111     PetscCall(MatHYPREGetParCSR(X,&x));
2112     PetscStackCallStandard(hypre_ParCSRMatrixAdd,1.0,y,1.0,x,&z);
2113     PetscCall(MatCreateFromParCSR(z,MATHYPRE,PETSC_OWN_POINTER,&B));
2114     PetscCall(MatHeaderMerge(Y,&B));
2115   }
2116 #else
2117   if (str == SAME_NONZERO_PATTERN) {
2118     hypre_ParCSRMatrix *x,*y;
2119     hypre_CSRMatrix    *xloc,*yloc;
2120     PetscInt           xnnz,ynnz;
2121     HYPRE_Complex      *xarr,*yarr;
2122     PetscBLASInt       one=1,bnz;
2123 
2124     PetscCall(MatHYPREGetParCSR(Y,&y));
2125     PetscCall(MatHYPREGetParCSR(X,&x));
2126 
2127     /* diagonal block */
2128     xloc = hypre_ParCSRMatrixDiag(x);
2129     yloc = hypre_ParCSRMatrixDiag(y);
2130     xnnz = 0;
2131     ynnz = 0;
2132     xarr = NULL;
2133     yarr = NULL;
2134     if (xloc) {
2135       xarr = hypre_CSRMatrixData(xloc);
2136       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2137     }
2138     if (yloc) {
2139       yarr = hypre_CSRMatrixData(yloc);
2140       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2141     }
2142     PetscCheckFalse(xnnz != ynnz,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT,xnnz,ynnz);
2143     PetscCall(PetscBLASIntCast(xnnz,&bnz));
2144     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2145 
2146     /* off-diagonal block */
2147     xloc = hypre_ParCSRMatrixOffd(x);
2148     yloc = hypre_ParCSRMatrixOffd(y);
2149     xnnz = 0;
2150     ynnz = 0;
2151     xarr = NULL;
2152     yarr = NULL;
2153     if (xloc) {
2154       xarr = hypre_CSRMatrixData(xloc);
2155       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2156     }
2157     if (yloc) {
2158       yarr = hypre_CSRMatrixData(yloc);
2159       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2160     }
2161     PetscCheckFalse(xnnz != ynnz,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT,xnnz,ynnz);
2162     PetscCall(PetscBLASIntCast(xnnz,&bnz));
2163     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2164   } else if (str == SUBSET_NONZERO_PATTERN) {
2165     PetscCall(MatAXPY_Basic(Y,a,X,str));
2166   } else {
2167     Mat B;
2168 
2169     PetscCall(MatAXPY_Basic_Preallocate(Y,X,&B));
2170     PetscCall(MatAXPY_BasicWithPreallocation(B,Y,a,X,str));
2171     PetscCall(MatHeaderReplace(Y,&B));
2172   }
2173 #endif
2174   PetscFunctionReturn(0);
2175 }
2176 
2177 /*MC
2178    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2179           based on the hypre IJ interface.
2180 
2181    Level: intermediate
2182 
2183 .seealso: MatCreate()
2184 M*/
2185 
/* MatCreate_HYPRE - constructor for the MATHYPRE type: allocates the Mat_HYPRE
   private data, fills the operations table, and registers the conversion,
   product, and preallocation methods */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&hB));

  hB->inner_free  = PETSC_TRUE;  /* by default we own (and free) the inner hypre objects */
  hB->available   = PETSC_TRUE;
  hB->sorted_full = PETSC_FALSE; /* no assumption whether column indices are sorted or not */
  hB->size        = 0;
  hB->array       = NULL;

  B->data       = (void*)hB;
  B->assembled  = PETSC_FALSE;

  PetscCall(PetscMemzero(B->ops,sizeof(struct _MatOps)));
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  B->ops->bindtocpu             = MatBindToCPU_HYPRE;
  B->boundtocpu                 = PETSC_FALSE;
#endif

  /* build cache for off array entries formed */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash));

  /* duplicate the communicator for hypre's own use */
  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B),&hB->comm));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATHYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_seqaij_hypre_C",MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_mpiaij_hypre_C",MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
#if defined(HYPRE_USING_HIP)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  PetscCall(MatSetVecType(B,VECHIP));
#endif
#if defined(HYPRE_USING_CUDA)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  PetscCall(MatSetVecType(B,VECCUDA));
#endif
#endif
  PetscFunctionReturn(0);
}
2255 
2256 static PetscErrorCode hypre_array_destroy(void *ptr)
2257 {
2258    PetscFunctionBegin;
2259    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
2260    PetscFunctionReturn(0);
2261 }
2262