xref: /petsc/src/mat/impls/hypre/mhypre.c (revision ccb4e88a40f0b86eaeca07ff64c64e4de2fae686)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscpkg_version.h>
7 #include <petsc/private/petschypre.h>
8 #include <petscmathypre.h>
9 #include <petsc/private/matimpl.h>
10 #include <petsc/private/deviceimpl.h>
11 #include <../src/mat/impls/hypre/mhypre.h>
12 #include <../src/mat/impls/aij/mpi/mpiaij.h>
13 #include <../src/vec/vec/impls/hypre/vhyp.h>
14 #include <HYPRE.h>
15 #include <HYPRE_utilities.h>
16 #include <_hypre_parcsr_ls.h>
17 #include <_hypre_sstruct_ls.h>
18 
19 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
20 #define  hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A)
21 #endif
22 
23 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
24 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
25 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
28 static PetscErrorCode hypre_array_destroy(void*);
29 static PetscErrorCode MatSetValues_HYPRE(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
30 
31 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
32 {
33   PetscErrorCode ierr;
34   PetscInt       i,n_d,n_o;
35   const PetscInt *ia_d,*ia_o;
36   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
37   HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;
38 
39   PetscFunctionBegin;
40   if (A_d) { /* determine number of nonzero entries in local diagonal part */
41     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
42     if (done_d) {
43       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
44       for (i=0; i<n_d; i++) {
45         nnz_d[i] = ia_d[i+1] - ia_d[i];
46       }
47     }
48     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
49   }
50   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
51     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
52     if (done_o) {
53       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
54       for (i=0; i<n_o; i++) {
55         nnz_o[i] = ia_o[i+1] - ia_o[i];
56       }
57     }
58     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
59   }
60   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
61     if (!done_o) { /* only diagonal part */
62       ierr = PetscCalloc1(n_d,&nnz_o);CHKERRQ(ierr);
63     }
64 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
65     { /* If we don't do this, the columns of the matrix will be all zeros! */
66       hypre_AuxParCSRMatrix *aux_matrix;
67       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
68       hypre_AuxParCSRMatrixDestroy(aux_matrix);
69       hypre_IJMatrixTranslator(ij) = NULL;
70       PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
71       /* it seems they partially fixed it in 2.19.0 */
72 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
73       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
74       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
75 #endif
76     }
77 #else
78     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
79 #endif
80     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
81     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
82   }
83   PetscFunctionReturn(0);
84 }
85 
86 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
87 {
88   PetscErrorCode ierr;
89   PetscInt       rstart,rend,cstart,cend;
90 
91   PetscFunctionBegin;
92   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
93   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
94   rstart = A->rmap->rstart;
95   rend   = A->rmap->rend;
96   cstart = A->cmap->rstart;
97   cend   = A->cmap->rend;
98   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
99   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
100   {
101     PetscBool      same;
102     Mat            A_d,A_o;
103     const PetscInt *colmap;
104     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
105     if (same) {
106       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
107       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
108       PetscFunctionReturn(0);
109     }
110     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
111     if (same) {
112       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
113       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
114       PetscFunctionReturn(0);
115     }
116     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
117     if (same) {
118       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
119       PetscFunctionReturn(0);
120     }
121     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
122     if (same) {
123       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
124       PetscFunctionReturn(0);
125     }
126   }
127   PetscFunctionReturn(0);
128 }
129 
130 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
131 {
132   PetscErrorCode    ierr;
133   PetscInt          i,rstart,rend,ncols,nr,nc;
134   const PetscScalar *values;
135   const PetscInt    *cols;
136   PetscBool         flg;
137 
138   PetscFunctionBegin;
139 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
140   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
141 #else
142   PetscStackCallStandard(HYPRE_IJMatrixInitialize_v2,(ij,HYPRE_MEMORY_HOST));
143 #endif
144   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
145   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
146   if (flg && nr == nc) {
147     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
148     PetscFunctionReturn(0);
149   }
150   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
151   if (flg) {
152     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
153     PetscFunctionReturn(0);
154   }
155 
156   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
157   for (i=rstart; i<rend; i++) {
158     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
159     if (ncols) {
160       HYPRE_Int nc = (HYPRE_Int)ncols;
161 
162       if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
163       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
164     }
165     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
166   }
167   PetscFunctionReturn(0);
168 }
169 
/*
    Fast copy of a SeqAIJ matrix into a hypre IJ matrix: copies the CSR row
    pointers, column indices, and values of A directly into the diagonal
    hypre_CSRMatrix of the underlying ParCSR object, bypassing
    HYPRE_IJMatrixSetValues(). Requires ij to use the HYPRE_PARCSR storage.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  /* whether PetscInt and HYPRE_Int have the same width; decides memcpy vs cast-per-entry */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  const PetscScalar     *pa;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    /* same integer width: copy CSR index arrays wholesale */
    ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr);
    ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr);
  } else {
    /* different widths: convert entry by entry */
    PetscInt i;

    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  ierr = MatSeqAIJGetArrayRead(A,&pa);CHKERRQ(ierr);
  ierr = PetscArraycpy(hdiag->data,pa,pdiag->nz);CHKERRQ(ierr);
  ierr = MatSeqAIJRestoreArrayRead(A,&pa);CHKERRQ(ierr);

  /* tell hypre the data went straight into the CSR arrays, so no auxiliary
     assembly structure is needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
207 
/*
    Fast copy of an MPIAIJ matrix into a hypre IJ matrix: copies the CSR arrays
    of the local diagonal (pA->A) and off-diagonal (pA->B) blocks directly into
    the diag/offd hypre_CSRMatrix structures of the underlying ParCSR object.
    Off-diagonal column indices are mapped back to global numbering through
    garray, as hypre expects. Requires ij to use HYPRE_PARCSR storage; only
    called for square matrices with the standard layout (see the cstart note).
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  /* whether PetscInt and HYPRE_Int have the same width; decides memcpy vs cast-per-entry */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  const PetscScalar     *pa;

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  /* hypre >= 2.16 keeps local numbering in the diag block, so a plain copy suffices */
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  ierr = MatSeqAIJGetArrayRead(pA->A,&pa);CHKERRQ(ierr);
  ierr = PetscArraycpy(hdiag->data,pa,pdiag->nz);CHKERRQ(ierr);
  ierr = MatSeqAIJRestoreArrayRead(pA->A,&pa);CHKERRQ(ierr);
  if (sameint) {
    /* pA->A and pA->B share the same row layout, so pA->A->rmap->n rows applies to both */
    ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  /* hypre >= 2.16 stores global offd indices in a separate big_j array */
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
  jj  = (PetscInt*) hoffd->big_j;
#else
  jj  = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  /* garray maps compressed local off-diagonal indices to global column indices */
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

  ierr = MatSeqAIJGetArrayRead(pA->B,&pa);CHKERRQ(ierr);
  ierr = PetscArraycpy(hoffd->data,pa,poffd->nz);CHKERRQ(ierr);
  ierr = MatSeqAIJRestoreArrayRead(pA->B,&pa);CHKERRQ(ierr);

  /* tell hypre the data went straight into the CSR arrays, so no auxiliary
     assembly structure is needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
277 
/*
    Converts a MATHYPRE matrix A into a MATIS matrix B. The local matrix of the
    MATIS object is built by merging, row by row, the diagonal and off-diagonal
    CSR blocks of the underlying hypre ParCSR matrix; off-diagonal columns are
    numbered dc..dc+oc-1 locally and mapped to global indices via the column
    l2g map built from hypre's col_map_offd. Supports MAT_INITIAL_MATRIX,
    MAT_REUSE_MATRIX (reuses the CSR of the existing local matrix) and
    MAT_INPLACE_MATRIX (replaces A's header).
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* extract sizes, starts, and the CSR arrays of the diag/offd blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* first dc local columns are the owned (diagonal) ones, then the ghosts */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse: write into the CSR arrays of the existing local matrix after
       validating that they are large enough */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk diag and offd CSR in lockstep, emitting each
     row's entries (offd columns shifted by dc) and sorting them by column */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = (PetscScalar*)aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* close out any remaining (empty) trailing rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
385 
/*
    Converts an AIJ matrix A into a MATHYPRE matrix B. For MAT_REUSE_MATRIX the
    existing hypre IJ matrix is destroyed and rebuilt from scratch; for
    MAT_INITIAL_MATRIX a new MATHYPRE is created; for MAT_INPLACE_MATRIX the
    freshly built matrix replaces A's header.
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)(M->data);
    /* for MAT_INPLACE_MATRIX, M stays local and replaces A's header below */
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  ierr = MatSetOption(*B,MAT_SORTED_FULL,PETSC_TRUE);CHKERRQ(ierr); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
  ierr = MatSetOption(*B,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr);
  }
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
421 
/*
    Converts a MATHYPRE matrix A into an AIJ matrix B (SeqAIJ on one process,
    MPIAIJ otherwise). The CSR arrays of the hypre diag/offd blocks are copied
    (or, for MAT_INPLACE_MATRIX with matching integer sizes, ownership is
    transferred outright to PETSc), column indices are sorted row by row, and
    off-diagonal indices are mapped to global numbering via col_map_offd.
    Device-resident hypre matrices are not supported.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  /* whether PetscInt and HYPRE_Int have the same width; decides whether hypre's
     index arrays can be adopted/copied directly or must be converted */
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) SETERRQ(comm,PETSC_ERR_SUP,"Not yet implemented");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the target CSR arrays for the diagonal block:
     fresh allocation, arrays of the matrix being reused, or (in place) the
     hypre arrays themselves */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
      ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    } else {
      /* same integer width: alias the hypre arrays; ownership transferred below */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  /* fill the diagonal CSR (no-op copies when aliasing hypre's own arrays) */
  if (!sameint) {
    for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr);
    ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr);
  }
  ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr);
  /* AIJ requires sorted column indices within each row; hypre does not */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* same three-way choice for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
        ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (!sameint) {
      for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
    } else {
      ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr);
    }
    /* map compressed off-diagonal column indices to global numbering */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the freshly allocated arrays to *B */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      /* data arrays are always taken over (hypre must not free them) */
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    /* sequential: no off-diagonal block */
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the freshly allocated arrays to *B */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns; register them in containers so they are released
     with hypre's allocator when *B is destroyed */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt nh;
    void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when integer sizes differ, only the data arrays came from hypre */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
636 
637 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
638 {
639   hypre_ParCSRMatrix *tA;
640   hypre_CSRMatrix    *hdiag,*hoffd;
641   Mat_SeqAIJ         *diag,*offd;
642   PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
643   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
644   PetscBool          ismpiaij,isseqaij;
645   PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
646   PetscErrorCode     ierr;
647   HYPRE_Int          *hdi = NULL,*hdj = NULL,*hoi = NULL,*hoj = NULL;
648   PetscInt           *pdi,*pdj,*poi,*poj;
649 #if defined(PETSC_HAVE_HYPRE_DEVICE)
650   PetscBool          iscuda = PETSC_FALSE;
651 #endif
652 
653   PetscFunctionBegin;
654   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
655   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
656   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
657   if (ismpiaij) {
658     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
659 
660     diag = (Mat_SeqAIJ*)a->A->data;
661     offd = (Mat_SeqAIJ*)a->B->data;
662 #if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE) && defined(HYPRE_USING_CUDA)
663     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJCUSPARSE,&iscuda);CHKERRQ(ierr);
664     if (iscuda && !A->boundtocpu) {
665       sameint = PETSC_TRUE;
666       ierr = MatSeqAIJCUSPARSEGetIJ(a->A,PETSC_FALSE,(const HYPRE_Int**)&hdi,(const HYPRE_Int**)&hdj);CHKERRQ(ierr);
667       ierr = MatSeqAIJCUSPARSEGetIJ(a->B,PETSC_FALSE,(const HYPRE_Int**)&hoi,(const HYPRE_Int**)&hoj);CHKERRQ(ierr);
668     } else {
669 #else
670     {
671 #endif
672       pdi = diag->i;
673       pdj = diag->j;
674       poi = offd->i;
675       poj = offd->j;
676       if (sameint) {
677         hdi = (HYPRE_Int*)pdi;
678         hdj = (HYPRE_Int*)pdj;
679         hoi = (HYPRE_Int*)poi;
680         hoj = (HYPRE_Int*)poj;
681       }
682     }
683     garray = a->garray;
684     noffd  = a->B->cmap->N;
685     dnnz   = diag->nz;
686     onnz   = offd->nz;
687   } else {
688     diag = (Mat_SeqAIJ*)A->data;
689     offd = NULL;
690 #if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE)
691     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJCUSPARSE,&iscuda);CHKERRQ(ierr);
692     if (iscuda && !A->boundtocpu) {
693       sameint = PETSC_TRUE;
694       ierr = MatSeqAIJCUSPARSEGetIJ(A,PETSC_FALSE,(const HYPRE_Int**)&hdi,(const HYPRE_Int**)&hdj);CHKERRQ(ierr);
695     } else {
696 #else
697     {
698 #endif
699       pdi = diag->i;
700       pdj = diag->j;
701       if (sameint) {
702         hdi = (HYPRE_Int*)pdi;
703         hdj = (HYPRE_Int*)pdj;
704       }
705     }
706     garray = NULL;
707     noffd  = 0;
708     dnnz   = diag->nz;
709     onnz   = 0;
710   }
711 
712   /* create a temporary ParCSR */
713   if (HYPRE_AssumedPartitionCheck()) {
714     PetscMPIInt myid;
715 
716     ierr       = MPI_Comm_rank(comm,&myid);CHKERRMPI(ierr);
717     row_starts = A->rmap->range + myid;
718     col_starts = A->cmap->range + myid;
719   } else {
720     row_starts = A->rmap->range;
721     col_starts = A->cmap->range;
722   }
723   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
724 #if defined(hypre_ParCSRMatrixOwnsRowStarts)
725   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
726   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
727 #endif
728 
729   /* set diagonal part */
730   hdiag = hypre_ParCSRMatrixDiag(tA);
731   if (!sameint) { /* malloc CSR pointers */
732     ierr = PetscMalloc2(A->rmap->n+1,&hdi,dnnz,&hdj);CHKERRQ(ierr);
733     for (i = 0; i < A->rmap->n+1; i++) hdi[i] = (HYPRE_Int)(pdi[i]);
734     for (i = 0; i < dnnz; i++)         hdj[i] = (HYPRE_Int)(pdj[i]);
735   }
736   hypre_CSRMatrixI(hdiag)           = hdi;
737   hypre_CSRMatrixJ(hdiag)           = hdj;
738   hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
739   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
740   hypre_CSRMatrixSetRownnz(hdiag);
741   hypre_CSRMatrixSetDataOwner(hdiag,0);
742 
743   /* set offdiagonal part */
744   hoffd = hypre_ParCSRMatrixOffd(tA);
745   if (offd) {
746     if (!sameint) { /* malloc CSR pointers */
747       ierr = PetscMalloc2(A->rmap->n+1,&hoi,onnz,&hoj);CHKERRQ(ierr);
748       for (i = 0; i < A->rmap->n+1; i++) hoi[i] = (HYPRE_Int)(poi[i]);
749       for (i = 0; i < onnz; i++)         hoj[i] = (HYPRE_Int)(poj[i]);
750     }
751     hypre_CSRMatrixI(hoffd)           = hoi;
752     hypre_CSRMatrixJ(hoffd)           = hoj;
753     hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
754     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
755     hypre_CSRMatrixSetRownnz(hoffd);
756     hypre_CSRMatrixSetDataOwner(hoffd,0);
757   }
758 #if defined(PETSC_HAVE_HYPRE_DEVICE)
759   PetscStackCallStandard(hypre_ParCSRMatrixInitialize_v2,(tA,iscuda ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST));
760 #else
761 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
762   PetscStackCallStandard(hypre_ParCSRMatrixInitialize,(tA));
763 #else
764   PetscStackCallStandard(hypre_ParCSRMatrixInitialize_v2,(tA,HYPRE_MEMORY_HOST));
765 #endif
766 #endif
767   hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA),HYPRE_MEMORY_HOST);
768   hypre_ParCSRMatrixSetNumNonzeros(tA);
769   hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
770   if (!hypre_ParCSRMatrixCommPkg(tA)) PetscStackCallStandard(hypre_MatvecCommPkgCreate,(tA));
771   *hA = tA;
772   PetscFunctionReturn(0);
773 }
774 
/* Undo MatAIJGetParCSR_Private(): destroy the temporary hypre_ParCSRMatrix wrapper
   without freeing the CSR arrays, which are still owned by the PETSc Mat A */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       ismpiaij,sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool       iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  ierr = PetscObjectTypeCompareAny((PetscObject)A,&iscuda,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");CHKERRQ(ierr);
  if (iscuda) sameint = PETSC_TRUE; /* the CUSPARSE path in the get routine passes HYPRE_Int indices directly; no converted copies were made */
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) { /* the get routine malloc'ed HYPRE_Int copies of the CSR index arrays; free them here */
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (ismpiaij) { /* the off-diagonal block only exists for MPIAIJ matrices */
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* detach PETSc-owned arrays so hypre_ParCSRMatrixDestroy() does not free them */
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL; /* garray is owned by the PETSc Mat as well */
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}
819 
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* save the ownership flags so they can be restored after the call (see comment below) */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPush("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR,hA,hP);
  PetscStackPop;
#else
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  /* give back to P and R the ownership they had on entry */
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
#endif
  PetscFunctionReturn(0);
}
852 
853 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
854 {
855   Mat                B;
856   hypre_ParCSRMatrix *hA,*hP,*hPtAP = NULL;
857   PetscErrorCode     ierr;
858   Mat_Product        *product=C->product;
859 
860   PetscFunctionBegin;
861   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
862   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
863   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
864   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
865 
866   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
867   C->product = product;
868 
869   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
870   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
871   PetscFunctionReturn(0);
872 }
873 
874 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat C)
875 {
876   PetscErrorCode ierr;
877 
878   PetscFunctionBegin;
879   ierr                   = MatSetType(C,MATAIJ);CHKERRQ(ierr);
880   C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
881   C->ops->productnumeric = MatProductNumeric_PtAP;
882   PetscFunctionReturn(0);
883 }
884 
885 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
886 {
887   Mat                B;
888   Mat_HYPRE          *hP;
889   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr = NULL;
890   HYPRE_Int          type;
891   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
892   PetscBool          ishypre;
893   PetscErrorCode     ierr;
894 
895   PetscFunctionBegin;
896   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
897   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
898   hP = (Mat_HYPRE*)P->data;
899   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
900   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
901   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
902 
903   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
904   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
905   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
906 
907   /* create temporary matrix and merge to C */
908   ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
909   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
910   PetscFunctionReturn(0);
911 }
912 
913 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
914 {
915   Mat                B;
916   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr = NULL;
917   Mat_HYPRE          *hA,*hP;
918   PetscBool          ishypre;
919   HYPRE_Int          type;
920   PetscErrorCode     ierr;
921 
922   PetscFunctionBegin;
923   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
924   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
925   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
926   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
927   hA = (Mat_HYPRE*)A->data;
928   hP = (Mat_HYPRE*)P->data;
929   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
930   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
931   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
932   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
933   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
934   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
935   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
936   ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
937   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
938   PetscFunctionReturn(0);
939 }
940 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPush("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA,hB); /* entry point used when hypre is built with device support */
#else
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);    /* host-only entry point */
#endif
  PetscStackPop;
  PetscFunctionReturn(0);
}
959 
960 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
961 {
962   Mat                D;
963   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
964   PetscErrorCode     ierr;
965   Mat_Product        *product=C->product;
966 
967   PetscFunctionBegin;
968   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
969   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
970   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
971   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
972 
973   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
974   C->product = product;
975 
976   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
977   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
978   PetscFunctionReturn(0);
979 }
980 
981 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat C)
982 {
983   PetscErrorCode ierr;
984 
985   PetscFunctionBegin;
986   ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
987   C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
988   C->ops->productnumeric = MatProductNumeric_AB;
989   PetscFunctionReturn(0);
990 }
991 
992 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
993 {
994   Mat                D;
995   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
996   Mat_HYPRE          *hA,*hB;
997   PetscBool          ishypre;
998   HYPRE_Int          type;
999   PetscErrorCode     ierr;
1000   Mat_Product        *product;
1001 
1002   PetscFunctionBegin;
1003   ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
1004   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
1005   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
1006   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
1007   hA = (Mat_HYPRE*)A->data;
1008   hB = (Mat_HYPRE*)B->data;
1009   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1010   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
1011   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
1012   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
1013   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
1014   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
1015   ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
1016   ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
1017 
1018   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
1019   product    = C->product;  /* save it from MatHeaderReplace() */
1020   C->product = NULL;
1021   ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
1022   C->product = product;
1023   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
1024   C->ops->productnumeric = MatProductNumeric_AB;
1025   PetscFunctionReturn(0);
1026 }
1027 
1028 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
1029 {
1030   Mat                E;
1031   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC = NULL;
1032   PetscErrorCode     ierr;
1033 
1034   PetscFunctionBegin;
1035   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
1036   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
1037   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
1038   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
1039   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
1040   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
1041   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
1042   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
1043   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
1044   PetscFunctionReturn(0);
1045 }
1046 
1047 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat D)
1048 {
1049   PetscErrorCode ierr;
1050 
1051   PetscFunctionBegin;
1052   ierr = MatSetType(D,MATAIJ);CHKERRQ(ierr);
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 /* ---------------------------------------------------- */
1057 static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
1058 {
1059   PetscFunctionBegin;
1060   C->ops->productnumeric = MatProductNumeric_AB;
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1065 {
1066   PetscErrorCode ierr;
1067   Mat_Product    *product = C->product;
1068   PetscBool      Ahypre;
1069 
1070   PetscFunctionBegin;
1071   ierr = PetscObjectTypeCompare((PetscObject)product->A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
1072   if (Ahypre) { /* A is a Hypre matrix */
1073     ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
1074     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1075     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1076     PetscFunctionReturn(0);
1077   }
1078   PetscFunctionReturn(0);
1079 }
1080 
1081 static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
1082 {
1083   PetscFunctionBegin;
1084   C->ops->productnumeric = MatProductNumeric_PtAP;
1085   PetscFunctionReturn(0);
1086 }
1087 
/* Configure C = P^T A P when P is MATHYPRE: if A is MATHYPRE as well the output is MATHYPRE,
   otherwise the output type (aij flavor or hypre) is selectable with a runtime option */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      flg;
  PetscInt       type = 0; /* index into outTypes[]; default is "aij" */
  const char     *outTypes[4] = {"aij","seqaij","mpiaij","hypre"};
  PetscInt       ntype = 4;
  Mat            A = product->A;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) { /* option name depends on which API (MatPtAP vs MatProduct) the user called */
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatPtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matptap_hypre_outtype","MatPtAP outtype","MatPtAP outtype",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  } else {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatProduct_PtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matproduct_ptap_hypre_outtype","MatProduct_PtAP outtype","MatProduct_PtAP",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  }

  if (type == 0 || type == 1 || type == 2) { /* "aij", "seqaij", "mpiaij" all map to MATAIJ */
    ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  } else if (type == 3) { /* "hypre" */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(0);
}
1129 
1130 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1131 {
1132   PetscErrorCode ierr;
1133   Mat_Product    *product = C->product;
1134 
1135   PetscFunctionBegin;
1136   switch (product->type) {
1137   case MATPRODUCT_AB:
1138     ierr = MatProductSetFromOptions_HYPRE_AB(C);CHKERRQ(ierr);
1139     break;
1140   case MATPRODUCT_PtAP:
1141     ierr = MatProductSetFromOptions_HYPRE_PtAP(C);CHKERRQ(ierr);
1142     break;
1143   default:
1144     break;
1145   }
1146   PetscFunctionReturn(0);
1147 }
1148 
1149 /* -------------------------------------------------------- */
1150 
1151 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1152 {
1153   PetscErrorCode ierr;
1154 
1155   PetscFunctionBegin;
1156   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
1157   PetscFunctionReturn(0);
1158 }
1159 
1160 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1161 {
1162   PetscErrorCode ierr;
1163 
1164   PetscFunctionBegin;
1165   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
1166   PetscFunctionReturn(0);
1167 }
1168 
1169 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1170 {
1171   PetscErrorCode ierr;
1172 
1173   PetscFunctionBegin;
1174   if (y != z) {
1175     ierr = VecCopy(y,z);CHKERRQ(ierr);
1176   }
1177   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
1178   PetscFunctionReturn(0);
1179 }
1180 
1181 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1182 {
1183   PetscErrorCode ierr;
1184 
1185   PetscFunctionBegin;
1186   if (y != z) {
1187     ierr = VecCopy(y,z);CHKERRQ(ierr);
1188   }
1189   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
1190   PetscFunctionReturn(0);
1191 }
1192 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* Temporarily wrap the PETSc vectors with the cached hypre vectors hA->x (column space)
     and hA->b (row space); for the transpose product the roles are swapped.
     When b == 0 the output vector is write-only, so its values need not be read in. */
  if (trans) {
    ierr = VecHYPRE_IJVectorPushVecRead(hA->b,x);CHKERRQ(ierr);
    if (b != 0.0) { ierr = VecHYPRE_IJVectorPushVec(hA->x,y);CHKERRQ(ierr); }
    else { ierr = VecHYPRE_IJVectorPushVecWrite(hA->x,y);CHKERRQ(ierr); }
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b->ij,(void**)&hx));
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x->ij,(void**)&hy));
  } else {
    ierr = VecHYPRE_IJVectorPushVecRead(hA->x,x);CHKERRQ(ierr);
    if (b != 0.0) { ierr = VecHYPRE_IJVectorPushVec(hA->b,y);CHKERRQ(ierr); }
    else { ierr = VecHYPRE_IJVectorPushVecWrite(hA->b,y);CHKERRQ(ierr); }
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x->ij,(void**)&hx));
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b->ij,(void**)&hy));
  }
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  if (trans) {
    PetscStackCallStandard(hypre_ParCSRMatrixMatvecT,(a,parcsr,hx,b,hy));
  } else {
    PetscStackCallStandard(hypre_ParCSRMatrixMatvec,(a,parcsr,hx,b,hy));
  }
  /* pop both wrappers: one push was done on each of hA->x and hA->b above, regardless of trans */
  ierr = VecHYPRE_IJVectorPopVec(hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorPopVec(hA->b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1225 
/* Release all resources owned by a MATHYPRE matrix */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* cached work vectors (created lazily in MatAssemblyEnd_HYPRE) */
  ierr = VecHYPRE_IJVectorDestroy(&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorDestroy(&hA->b);CHKERRQ(ierr);
  if (hA->ij) {
    /* if we do not own the inner ParCSR, detach it so HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) {ierr = MPI_Comm_free(&hA->comm);CHKERRMPI(ierr);}

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  /* scratch workspace handed out by MatGetArray_HYPRE */
  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove the composed methods */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1253 
1254 static PetscErrorCode MatSetUp_HYPRE(Mat A)
1255 {
1256   PetscErrorCode ierr;
1257 
1258   PetscFunctionBegin;
1259   ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
1260   PetscFunctionReturn(0);
1261 }
1262 
1263 //TODO FIX hypre_CSRMatrixMatvecOutOfPlace
1264 #if defined(PETSC_HAVE_HYPRE_DEVICE)
/* Move the hypre data of A (and its cached work vectors) to host or device memory */
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE            *hA = (Mat_HYPRE*)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  A->boundtocpu = bind;
  /* only migrate an already assembled matrix, and only if it is in the wrong memory space */
  if (hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
    PetscStackCallStandard(hypre_ParCSRMatrixMigrate,(parcsr, hmem));
  }
  /* keep the cached work vectors in the same memory space as the matrix */
  if (hA->x) { ierr  = VecHYPRE_IJBindToCPU(hA->x,bind);CHKERRQ(ierr); }
  if (hA->b) { ierr  = VecHYPRE_IJBindToCPU(hA->b,bind);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1282 #endif
1283 
/* Finish assembly: drain stashed off-process entries, assemble the hypre IJ matrix,
   recreate its auxiliary translator, and set up the communication package and work vectors */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  /* insert the off-process values that arrived in the stash */
  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
      PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
#else
      PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize_v2,(aux_matrix,HYPRE_MEMORY_HOST));
#endif
    }
  }
  {
    /* make sure the ParCSR has its matvec communication package */
    hypre_ParCSRMatrix *parcsr;

    PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscStackCallStandard(hypre_MatvecCommPkgCreate,(parcsr));
  }
  /* lazily create the work vectors used by the mult kernels: x in column space, b in row space */
  if (!hA->x) { ierr = VecHYPRE_IJVectorCreate(A->cmap,&hA->x);CHKERRQ(ierr); }
  if (!hA->b) { ierr = VecHYPRE_IJVectorCreate(A->rmap,&hA->b);CHKERRQ(ierr); }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  ierr = MatBindToCPU_HYPRE(A,A->boundtocpu);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
1353 
1354 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1355 {
1356   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1357   PetscErrorCode     ierr;
1358 
1359   PetscFunctionBegin;
1360   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1361 
1362   if (hA->size >= size) {
1363     *array = hA->array;
1364   } else {
1365     ierr = PetscFree(hA->array);CHKERRQ(ierr);
1366     hA->size = size;
1367     ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
1368     *array = hA->array;
1369   }
1370 
1371   hA->available = PETSC_FALSE;
1372   PetscFunctionReturn(0);
1373 }
1374 
1375 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1376 {
1377   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1378 
1379   PetscFunctionBegin;
1380   *array = NULL;
1381   hA->available = PETSC_TRUE;
1382   PetscFunctionReturn(0);
1383 }
1384 
/* MatSetValues for MATHYPRE: negative row/column indices are ignored (PETSc convention).
   With INSERT_VALUES, off-process rows go to the stash and are communicated at assembly;
   with ADD_VALUES, all rows are handed directly to hypre's IJ interface. */
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  HYPRE_Complex  *sscr;    /* scratch for values converted to HYPRE_Complex */
  PetscInt       *cscr[2]; /* [0]: compacted column indices; [1]: their original positions in cols[] */
  PetscInt       i,nzc;
  void           *array = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* one workspace sliced into the two index arrays followed by the value scratch */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  /* compact away negative column indices, remembering where each kept column came from */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) { /* every column was negative: nothing to insert */
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
    PetscStackCallStandard(hypre_ParCSRMatrixMigrate,(parcsr, HYPRE_MEMORY_HOST));
  }
#endif

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        /* guard against HYPRE_Int being narrower than PetscInt */
        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]);CHKERRQ(ierr); }
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc; /* advance to the next logical row of v even when this row was skipped */
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]);CHKERRQ(ierr); }
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1455 
/* Preallocate the hypre IJ matrix backing A.

   dnz/onz give a uniform per-row nonzero count for the diagonal/off-diagonal
   blocks; dnnz/onnz give per-row counts and take precedence when non-NULL.
   Creates the IJ matrix on first call, otherwise checks that its local range
   still matches A's layout, then (re)initializes it so values can be set. */
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs   = A->rmap->rstart;
  re   = A->rmap->rend;
  cs   = A->cmap->rstart;
  ce   = A->cmap->rend;
  if (!hA->ij) {
    /* first call: create the IJ matrix over A's (inclusive) local ranges */
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    /* repeated call: the existing IJ matrix must agree with A's layout */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  /* default per-row estimate when the user gave PETSC_DEFAULT/PETSC_DECIDE */
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    /* NOTE(review): this cast assumes HYPRE_Int and PetscInt have the same size — presumably enforced at configure time; confirm */
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRMPI(ierr);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else honnz = (HYPRE_Int*)onnz;
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    /* sequential: only diagonal-block sizes are meaningful */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
#else
  PetscStackCallStandard(HYPRE_IJMatrixInitialize_v2,(hA->ij,HYPRE_MEMORY_HOST));
#endif
  /* free only the count arrays we allocated ourselves */
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }
  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1533 
1534 /*@C
1535    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1536 
1537    Collective on Mat
1538 
1539    Input Parameters:
1540 +  A - the matrix
1541 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1542           (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e. 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e. 'm'.
1556 
1557    Notes:
1558     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1559 
1560    Level: intermediate
1561 
1562 .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1563 @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the type-specific implementation; PetscTryMethod is a no-op
     when A does not provide "MatHYPRESetPreallocation_C" */
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1574 
1575 /*
1576    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1577 
1578    Collective
1579 
1580    Input Parameters:
1581 +  parcsr   - the pointer to the hypre_ParCSRMatrix
1582 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1583 -  copymode - PETSc copying options
1584 
1585    Output Parameter:
1586 .  A  - the matrix
1587 
1588    Level: intermediate
1589 
1590 .seealso: MatHYPRE, PetscCopyMode
1591 */
/* Wraps (or converts) a hypre_ParCSRMatrix into a PETSc Mat of type mtype.
   A temporary MATHYPRE matrix T takes over (or borrows, per copymode) the
   ParCSR object, and is then either returned directly (MATHYPRE) or
   converted to AIJ/IS. */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm  = hypre_ParCSRMatrixComm(parcsr);
  ierr  = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix: inclusive local row/column ranges and global sizes */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns: hypre may report last < first */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: half-open ranges [rstart,rend), clamped to global size */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

// TODO DEV
  /* create new ParCSR object if needed: for MATHYPRE with PETSC_COPY_VALUES
     we deep-copy the input and then treat the copy as owned */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    /* old hypre: clone copies only the pattern; copy the values by hand */
    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr,1);
#endif
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRMPI(ierr);
    }
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    /* point hypre at PETSc's layout ranges when the ParCSR does not own its own
       starts arrays (older hypre with ownership flags) */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatSetOption(*A,MAT_SORTED_FULL,PETSC_TRUE);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1716 
/* Return the hypre_ParCSRMatrix wrapped by a MATHYPRE matrix (borrowed
   reference: the caller must not destroy it). */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  /* the IJ matrix is created lazily (preallocation/creation paths) */
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}
1729 
1730 /*
1731    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1732 
1733    Not collective
1734 
   Input Parameter:
.  A  - the MATHYPRE object
1737 
1738    Output Parameter:
1739 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1740 
1741    Level: intermediate
1742 
1743 .seealso: MatHYPRE, PetscCopyMode
1744 */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* PetscUseMethod errors (unlike PetscTryMethod) if A does not provide
     "MatHYPREGetParCSR_C", i.e. if A is not a MATHYPRE matrix */
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1755 
1756 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1757 {
1758   hypre_ParCSRMatrix *parcsr;
1759   hypre_CSRMatrix    *ha;
1760   PetscInt           rst;
1761   PetscErrorCode     ierr;
1762 
1763   PetscFunctionBegin;
1764   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1765   ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
1766   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1767   if (missing) *missing = PETSC_FALSE;
1768   if (dd) *dd = -1;
1769   ha = hypre_ParCSRMatrixDiag(parcsr);
1770   if (ha) {
1771     PetscInt  size,i;
1772     HYPRE_Int *ii,*jj;
1773 
1774     size = hypre_CSRMatrixNumRows(ha);
1775     ii   = hypre_CSRMatrixI(ha);
1776     jj   = hypre_CSRMatrixJ(ha);
1777     for (i = 0; i < size; i++) {
1778       PetscInt  j;
1779       PetscBool found = PETSC_FALSE;
1780 
1781       for (j = ii[i]; j < ii[i+1] && !found; j++)
1782         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1783 
1784       if (!found) {
1785         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1786         if (missing) *missing = PETSC_TRUE;
1787         if (dd) *dd = i+rst;
1788         PetscFunctionReturn(0);
1789       }
1790     }
1791     if (!size) {
1792       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1793       if (missing) *missing = PETSC_TRUE;
1794       if (dd) *dd = rst;
1795     }
1796   } else {
1797     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1798     if (missing) *missing = PETSC_TRUE;
1799     if (dd) *dd = rst;
1800   }
1801   PetscFunctionReturn(0);
1802 }
1803 
/* Scale all stored values of A by s.  With hypre >= 2.19 this is a single
   hypre call; older hypre lacks hypre_ParCSRMatrixScale, so the diagonal and
   off-diagonal CSR value arrays are scaled in place. */
static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
  hypre_CSRMatrix    *ha;
#endif
  PetscErrorCode     ierr;
  HYPRE_Complex      hs;

  PetscFunctionBegin;
  ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
#if PETSC_PKG_HYPRE_VERSION_GE(2,19,0)
  PetscStackCallStandard(hypre_ParCSRMatrixScale,(parcsr,hs));
#else  /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    /* ii[size] is the total number of stored entries in this block */
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
#endif
  PetscFunctionReturn(0);
}
1845 
1846 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1847 {
1848   hypre_ParCSRMatrix *parcsr;
1849   HYPRE_Int          *lrows;
1850   PetscInt           rst,ren,i;
1851   PetscErrorCode     ierr;
1852 
1853   PetscFunctionBegin;
1854   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1855   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1856   ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
1857   ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1858   for (i=0;i<numRows;i++) {
1859     if (rows[i] < rst || rows[i] >= ren)
1860       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1861     lrows[i] = rows[i] - rst;
1862   }
1863   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1864   ierr = PetscFree(lrows);CHKERRQ(ierr);
1865   PetscFunctionReturn(0);
1866 }
1867 
1868 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1869 {
1870   PetscErrorCode      ierr;
1871 
1872   PetscFunctionBegin;
1873   if (ha) {
1874     HYPRE_Int     *ii, size;
1875     HYPRE_Complex *a;
1876 
1877     size = hypre_CSRMatrixNumRows(ha);
1878     a    = hypre_CSRMatrixData(ha);
1879     ii   = hypre_CSRMatrixI(ha);
1880 
1881     if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);}
1882   }
1883   PetscFunctionReturn(0);
1884 }
1885 
1886 PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1887 {
1888   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1889 
1890   PetscFunctionBegin;
1891   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
1892     PetscStackCallStandard(HYPRE_IJMatrixSetConstantValues,(hA->ij,0.0));
1893   } else {
1894     hypre_ParCSRMatrix *parcsr;
1895     PetscErrorCode     ierr;
1896 
1897     ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1898     ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
1899     ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
1900   }
1901   PetscFunctionReturn(0);
1902 }
1903 
1904 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
1905 {
1906   PetscInt        ii;
1907   HYPRE_Int       *i, *j;
1908   HYPRE_Complex   *a;
1909 
1910   PetscFunctionBegin;
1911   if (!hA) PetscFunctionReturn(0);
1912 
1913   i = hypre_CSRMatrixI(hA);
1914   j = hypre_CSRMatrixJ(hA);
1915   a = hypre_CSRMatrixData(hA);
1916 
1917   for (ii = 0; ii < N; ii++) {
1918     HYPRE_Int jj, ibeg, iend, irow;
1919 
1920     irow = rows[ii];
1921     ibeg = i[irow];
1922     iend = i[irow+1];
1923     for (jj = ibeg; jj < iend; jj++)
1924       if (j[jj] == irow) a[jj] = diag;
1925       else a[jj] = 0.0;
1926    }
1927    PetscFunctionReturn(0);
1928 }
1929 
1930 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1931 {
1932   hypre_ParCSRMatrix  *parcsr;
1933   PetscInt            *lrows,len;
1934   HYPRE_Complex       hdiag;
1935   PetscErrorCode      ierr;
1936 
1937   PetscFunctionBegin;
1938   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
1939   ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr);
1940   /* retrieve the internal matrix */
1941   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1942   /* get locally owned rows */
1943   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1944   /* zero diagonal part */
1945   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr);
1946   /* zero off-diagonal part */
1947   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);
1948 
1949   ierr = PetscFree(lrows);CHKERRQ(ierr);
1950   PetscFunctionReturn(0);
1951 }
1952 
1953 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1954 {
1955   PetscErrorCode ierr;
1956 
1957   PetscFunctionBegin;
1958   if (mat->nooffprocentries) PetscFunctionReturn(0);
1959 
1960   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
1961   PetscFunctionReturn(0);
1962 }
1963 
/* Expose one (global-numbered) row of A via hypre's GetRow; the returned
   idx/v arrays are hypre-owned and must be released with MatRestoreRow. */
static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  HYPRE_Int           hnz;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API; the pointer casts assume matching index/scalar sizes -- configure-time assumption */
  PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  if (nz) *nz = (PetscInt)hnz;
  PetscFunctionReturn(0);
}
1978 
/* Release the row data handed out by MatGetRow_HYPRE (must be called to pair
   every MatGetRow on this matrix type). */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  HYPRE_Int           hnz;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API; pass back the count obtained from GetRow (0 if unused) */
  hnz = nz ? (HYPRE_Int)(*nz) : 0;
  PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  PetscFunctionReturn(0);
}
1993 
1994 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1995 {
1996   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1997   PetscInt  i;
1998 
1999   PetscFunctionBegin;
2000   if (!m || !n) PetscFunctionReturn(0);
2001   /* Ignore negative row indices
2002    * And negative column indices should be automatically ignored in hypre
2003    * */
2004   for (i=0; i<m; i++) {
2005     if (idxm[i] >= 0) {
2006       HYPRE_Int hn = (HYPRE_Int)n;
2007       PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
2008     }
2009   }
2010   PetscFunctionReturn(0);
2011 }
2012 
2013 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
2014 {
2015   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
2016 
2017   PetscFunctionBegin;
2018   switch (op) {
2019   case MAT_NO_OFF_PROC_ENTRIES:
2020     if (flg) {
2021       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
2022     }
2023     break;
2024   case MAT_SORTED_FULL:
2025     hA->sorted_full = flg;
2026     break;
2027   default:
2028     break;
2029   }
2030   PetscFunctionReturn(0);
2031 }
2032 
/* View A.  Non-native formats go through a temporary AIJ wrapper sharing the
   ParCSR data; the native format uses hypre's text writer (one file per
   process, ASCII viewers only). */
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  PetscErrorCode     ierr;
  PetscViewerFormat  format;

  PetscFunctionBegin;
  ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
  /* info-only formats print nothing for this type */
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(0);
  if (format != PETSC_VIEWER_NATIVE) {
    Mat                B;
    hypre_ParCSRMatrix *parcsr;
    PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;

    /* wrap the ParCSR data in an AIJ matrix (no copy) and use its viewer */
    ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
    ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
    ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr);
    if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");
    ierr = (*mview)(B,view);CHKERRQ(ierr);
    ierr = MatDestroy(&B);CHKERRQ(ierr);
  } else {
    Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char *filename;

    /* HYPRE uses only text files */
    ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
    if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
    ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr);
    /* hypre writes one file per rank, suffixed with the rank number */
    PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
    ierr = MPI_Comm_size(hA->comm,&size);CHKERRMPI(ierr);
    if (size > 1) {
      ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
2072 
2073 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
2074 {
2075   hypre_ParCSRMatrix *parcsr = NULL;
2076   PetscErrorCode     ierr;
2077   PetscCopyMode      cpmode;
2078 
2079   PetscFunctionBegin;
2080   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
2081   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2082     parcsr = hypre_ParCSRMatrixClone(parcsr,0);
2083     cpmode = PETSC_OWN_POINTER;
2084   } else {
2085     cpmode = PETSC_COPY_VALUES;
2086   }
2087   ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr);
2088   PetscFunctionReturn(0);
2089 }
2090 
2091 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2092 {
2093   hypre_ParCSRMatrix *acsr,*bcsr;
2094   PetscErrorCode     ierr;
2095 
2096   PetscFunctionBegin;
2097   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2098     ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr);
2099     ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr);
2100     PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
2101     ierr = MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE);CHKERRQ(ierr); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2102     ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2103     ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2104   } else {
2105     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2106   }
2107   PetscFunctionReturn(0);
2108 }
2109 
/* Extract the diagonal of A into d.  Requires congruent row/column layouts
   and (checked in debug mode) no missing diagonal entries. */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  HYPRE_Complex      *data = NULL;
  HYPRE_Int          *diag = NULL;
  PetscInt           i;
  PetscBool          cong;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
  if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
  if (PetscDefined(USE_DEBUG)) {
    PetscBool miss;
    ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
    ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
    diag = hypre_CSRMatrixI(dmat);
    data = hypre_CSRMatrixData(dmat);
    /* NOTE(review): takes the FIRST stored entry of each row (data[diag[i]]),
       which assumes hypre stores the diagonal entry first in the diag block
       -- presumably a hypre convention; confirm */
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2141 
2142 #include <petscblaslapack.h>
2143 
2144 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
2145 {
2146   PetscErrorCode ierr;
2147 
2148   PetscFunctionBegin;
2149 #if defined(PETSC_HAVE_HYPRE_DEVICE)
2150   {
2151     Mat                B;
2152     hypre_ParCSRMatrix *x,*y,*z;
2153 
2154     ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
2155     ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);
2156     PetscStackCallStandard(hypre_ParCSRMatrixAdd,(1.0,y,1.0,x,&z));
2157     ierr = MatCreateFromParCSR(z,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
2158     ierr = MatHeaderMerge(Y,&B);CHKERRQ(ierr);
2159   }
2160 #else
2161   if (str == SAME_NONZERO_PATTERN) {
2162     hypre_ParCSRMatrix *x,*y;
2163     hypre_CSRMatrix    *xloc,*yloc;
2164     PetscInt           xnnz,ynnz;
2165     HYPRE_Complex      *xarr,*yarr;
2166     PetscBLASInt       one=1,bnz;
2167 
2168     ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
2169     ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);
2170 
2171     /* diagonal block */
2172     xloc = hypre_ParCSRMatrixDiag(x);
2173     yloc = hypre_ParCSRMatrixDiag(y);
2174     xnnz = 0;
2175     ynnz = 0;
2176     xarr = NULL;
2177     yarr = NULL;
2178     if (xloc) {
2179       xarr = hypre_CSRMatrixData(xloc);
2180       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2181     }
2182     if (yloc) {
2183       yarr = hypre_CSRMatrixData(yloc);
2184       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2185     }
2186     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
2187     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
2188     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2189 
2190     /* off-diagonal block */
2191     xloc = hypre_ParCSRMatrixOffd(x);
2192     yloc = hypre_ParCSRMatrixOffd(y);
2193     xnnz = 0;
2194     ynnz = 0;
2195     xarr = NULL;
2196     yarr = NULL;
2197     if (xloc) {
2198       xarr = hypre_CSRMatrixData(xloc);
2199       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2200     }
2201     if (yloc) {
2202       yarr = hypre_CSRMatrixData(yloc);
2203       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2204     }
2205     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
2206     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
2207     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2208   } else if (str == SUBSET_NONZERO_PATTERN) {
2209     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2210   } else {
2211     Mat B;
2212 
2213     ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr);
2214     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2215     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2216   }
2217 #endif
2218   PetscFunctionReturn(0);
2219 }
2220 
2221 /*MC
2222    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2223           based on the hypre IJ interface.
2224 
2225    Level: intermediate
2226 
2227 .seealso: MatCreate()
2228 M*/
2229 
/* Type constructor for MATHYPRE: allocates the Mat_HYPRE context, installs
   the operation table, creates the off-process stash, duplicates the
   communicator for hypre's use, and registers the composed methods. */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(B,&hB);CHKERRQ(ierr);

  hB->inner_free  = PETSC_TRUE;  /* we own the wrapped hypre objects by default */
  hB->available   = PETSC_TRUE;
  hB->sorted_full = PETSC_FALSE; /* no assumption whether column indices are sorted or not */
  hB->size        = 0;
  hB->array       = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  B->ops->bindtocpu             = MatBindToCPU_HYPRE;
  B->boundtocpu                 = PETSC_FALSE;
#endif

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* hypre gets its own communicator, distinct from PETSc's */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRMPI(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_seqaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_mpiaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
#if defined(HYPRE_USING_HIP)
  ierr = PetscDeviceInitialize(PETSC_DEVICE_HIP);CHKERRQ(ierr);
  ierr = MatSetVecType(B,VECHIP);CHKERRQ(ierr);
#endif
#if defined(HYPRE_USING_CUDA)
  ierr = PetscDeviceInitialize(PETSC_DEVICE_CUDA);CHKERRQ(ierr);
  ierr = MatSetVecType(B,VECCUDA);CHKERRQ(ierr);
#endif
#endif
  PetscFunctionReturn(0);
}
2301 
2302 static PetscErrorCode hypre_array_destroy(void *ptr)
2303 {
2304    PetscFunctionBegin;
2305    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
2306    PetscFunctionReturn(0);
2307 }
2308