xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 534a8f05a7a8aff70dd8cfd53d9cd834400a8dbf)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscpkg_version.h>
7 #include <petsc/private/petschypre.h>
8 #include <petscmathypre.h>
9 #include <petsc/private/matimpl.h>
10 #include <../src/mat/impls/hypre/mhypre.h>
11 #include <../src/mat/impls/aij/mpi/mpiaij.h>
12 #include <../src/vec/vec/impls/hypre/vhyp.h>
13 #include <HYPRE.h>
14 #include <HYPRE_utilities.h>
15 #include <_hypre_parcsr_ls.h>
16 #include <_hypre_sstruct_ls.h>
17 
18 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
19 
20 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
21 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
22 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
23 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
24 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
25 static PetscErrorCode hypre_array_destroy(void*);
26 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
27 
28 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
29 {
30   PetscErrorCode ierr;
31   PetscInt       i,n_d,n_o;
32   const PetscInt *ia_d,*ia_o;
33   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
34   HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;
35 
36   PetscFunctionBegin;
37   if (A_d) { /* determine number of nonzero entries in local diagonal part */
38     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
39     if (done_d) {
40       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
41       for (i=0; i<n_d; i++) {
42         nnz_d[i] = ia_d[i+1] - ia_d[i];
43       }
44     }
45     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
46   }
47   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
48     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
49     if (done_o) {
50       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
51       for (i=0; i<n_o; i++) {
52         nnz_o[i] = ia_o[i+1] - ia_o[i];
53       }
54     }
55     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
56   }
57   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
58     if (!done_o) { /* only diagonal part */
59       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
60       for (i=0; i<n_d; i++) {
61         nnz_o[i] = 0;
62       }
63     }
64 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
65     { /* If we don't do this, the columns of the matrix will be all zeros! */
66       hypre_AuxParCSRMatrix *aux_matrix;
67       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
68       hypre_AuxParCSRMatrixDestroy(aux_matrix);
69       hypre_IJMatrixTranslator(ij) = NULL;
70       PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
71       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
72       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
73     }
74 #else
75     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
76 #endif
77     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
78     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
79   }
80   PetscFunctionReturn(0);
81 }
82 
83 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
84 {
85   PetscErrorCode ierr;
86   PetscInt       rstart,rend,cstart,cend;
87 
88   PetscFunctionBegin;
89   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
90   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
91   rstart = A->rmap->rstart;
92   rend   = A->rmap->rend;
93   cstart = A->cmap->rstart;
94   cend   = A->cmap->rend;
95   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
96   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
97   {
98     PetscBool      same;
99     Mat            A_d,A_o;
100     const PetscInt *colmap;
101     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
102     if (same) {
103       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
104       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
105       PetscFunctionReturn(0);
106     }
107     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
108     if (same) {
109       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
110       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
111       PetscFunctionReturn(0);
112     }
113     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
114     if (same) {
115       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
116       PetscFunctionReturn(0);
117     }
118     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
119     if (same) {
120       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
121       PetscFunctionReturn(0);
122     }
123   }
124   PetscFunctionReturn(0);
125 }
126 
127 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
128 {
129   PetscErrorCode    ierr;
130   PetscInt          i,rstart,rend,ncols,nr,nc;
131   const PetscScalar *values;
132   const PetscInt    *cols;
133   PetscBool         flg;
134 
135   PetscFunctionBegin;
136   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
137   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
138   if (flg && nr == nc) {
139     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
140     PetscFunctionReturn(0);
141   }
142   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
143   if (flg) {
144     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
145     PetscFunctionReturn(0);
146   }
147 
148   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
149   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
150   for (i=rstart; i<rend; i++) {
151     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
152     if (ncols) {
153       HYPRE_Int nc = (HYPRE_Int)ncols;
154 
155       if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
156       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
157     }
158     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
159   }
160   PetscFunctionReturn(0);
161 }
162 
/*
   Fast copy of a SeqAIJ matrix into a HYPRE IJ matrix: writes the CSR arrays
   straight into the hypre ParCSR diagonal block, bypassing
   HYPRE_IJMatrixSetValues. Assumes ij was preallocated from A (see
   MatHYPRE_IJMatrixPreallocate) so the hypre arrays are large enough.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) { /* PetscInt and HYPRE_Int have the same size: bulk copy the index arrays */
    ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr);
    ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr);
  } else { /* mismatched integer widths: convert entry by entry */
    PetscInt i;

    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);

  /* data was placed directly into the ParCSR object: tell hypre it does not
     need the auxiliary matrix at assembly time */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
197 
/*
   Fast copy of an MPIAIJ matrix into a HYPRE IJ matrix: writes the local
   diag/offd CSR arrays straight into the hypre ParCSR blocks, bypassing
   HYPRE_IJMatrixSetValues. Only called for square matrices (see
   MatHYPRE_IJMatrixCopy), since the usual PETSc layout is assumed below.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) { /* same integer width: bulk copy the diag row pointers */
    ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this
     (with hypre >= 2.16 the diag block stays in local numbering, so no shift) */
  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);
  if (sameint) { /* copy the offd row pointers the same way */
    ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
  jj  = (PetscInt*) hoffd->big_j;
#else
  jj  = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  /* garray maps local offd column indices to global column indices */
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

  ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr);

  /* data was placed directly: hypre does not need the auxiliary matrix at assembly */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
263 
/*
   Converts a MATHYPRE matrix A to MATIS format.

   The MATIS local matrix is a SeqAIJ built by merging, row by row, the
   diagonal and off-diagonal CSR blocks of the hypre ParCSR matrix; the
   off-diagonal columns are numbered locally after the dc diagonal columns,
   and their global indices go into the column local-to-global map.
   Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* global sizes, first owned row/column, and the local CSR blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns 0..dc-1 are the owned (diagonal) columns, dc..dc+oc-1 the ghosts */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse the CSR arrays of the existing local matrix after validating sizes */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk diag and offd rows in lockstep, appending the
     offd entries (shifted by dc into the local ghost numbering) after the
     diag entries of each row, then sort each merged row by column */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = (PetscScalar*)aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* remaining rows (if any) are empty: close the row pointer array */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to lA */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
371 
372 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
373 {
374   Mat            M = NULL;
375   Mat_HYPRE      *hB;
376   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
377   PetscErrorCode ierr;
378 
379   PetscFunctionBegin;
380   if (reuse == MAT_REUSE_MATRIX) {
381     /* always destroy the old matrix and create a new memory;
382        hope this does not churn the memory too much. The problem
383        is I do not know if it is possible to put the matrix back to
384        its initial state so that we can directly copy the values
385        the second time through. */
386     hB = (Mat_HYPRE*)((*B)->data);
387     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
388   } else {
389     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
390     ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr);
391     ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
392     hB   = (Mat_HYPRE*)(M->data);
393     if (reuse == MAT_INITIAL_MATRIX) *B = M;
394   }
395   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
396   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
397   if (reuse == MAT_INPLACE_MATRIX) {
398     ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr);
399   }
400   (*B)->preallocated = PETSC_TRUE;
401   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
402   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
403   PetscFunctionReturn(0);
404 }
405 
/*
   Converts a MATHYPRE matrix A to (Seq/MPI)AIJ format.

   The hypre diag/offd CSR arrays are copied (MAT_INITIAL_MATRIX,
   MAT_REUSE_MATRIX) or, when the integer sizes match, transferred
   (MAT_INPLACE_MATRIX) to the PETSc matrix; each row is sorted by column
   index since AIJ expects sorted rows while hypre does not guarantee it.
   For MAT_INPLACE_MATRIX the hypre-owned arrays that PETSc takes over are
   registered in containers so they are later released with hypre_TFree.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain destination arrays for the diagonal block */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    /* overwrite the CSR arrays of the existing matrix after validating sizes */
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      /* differing integer widths: the hypre index arrays cannot be reused directly */
      ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
      ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    } else {
      /* steal the hypre index arrays in place */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  if (!sameint) {
    for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr);
    ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr);
  }
  ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr);
  /* AIJ expects rows sorted by column index; hypre rows may not be */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* obtain destination arrays for the off-diagonal block, mirroring the diag logic */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
        ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (!sameint) {
      for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
    } else {
      ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr);
    }
    /* map hypre's compressed offd column indices to global column indices */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the allocated arrays to *B */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the allocated arrays to *B */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns: register them in containers whose destroy
     callback calls hypre_array_destroy */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt nh;
    void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when !sameint only the data arrays (da,oa) came from hypre; the index
       arrays were PetscMalloc'd above and are freed by the AIJ matrix */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
617 
/*
   Wraps a (Seq/MPI)AIJ matrix A in a temporary hypre ParCSR matrix *hA
   WITHOUT copying the numerical values: the hypre CSR blocks borrow the
   PETSc arrays (data always; index arrays too when PetscInt and HYPRE_Int
   have the same size, otherwise converted copies are PetscMalloc'd).
   Must be matched by MatAIJRestoreParCSR_Private, which detaches the
   borrowed arrays before destroying the ParCSR.
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else { /* sequential: no off-diagonal block */
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    /* with assumed partition, hypre only wants this rank's [start,end) pair */
    PetscMPIInt myid;

    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
  /* row/col starts point into PETSc layouts: hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (sameint) { /* reuse CSR pointers */
    hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
    hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
  } else { /* malloc CSR pointers */
    HYPRE_Int *hi,*hj;

    ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr);
    for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
    for (i = 0; i < dnnz; i++)         hj[i] = (HYPRE_Int)(diag->j[i]);
    hypre_CSRMatrixI(hdiag) = hi;
    hypre_CSRMatrixJ(hdiag) = hj;
  }
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0); /* data is borrowed from PETSc */

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (sameint) { /* reuse CSR pointers */
      hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
      hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
    } else { /* malloc CSR pointers */
      HYPRE_Int *hi,*hj;

      ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr);
      for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
      for (i = 0; i < onnz; i++)         hj[i] = (HYPRE_Int)(offd->j[i]);
      hypre_CSRMatrixI(hoffd) = hi;
      hypre_CSRMatrixJ(hoffd) = hj;
    }
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    /* NOTE(review): assumes HYPRE_BigInt and PetscInt have the same size here,
       as garray is reinterpreted rather than converted — verify for 64-bit hypre */
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}
710 
/* Undoes MatAIJGetParCSR_Private(): frees the HYPRE_Int index arrays allocated
   for the PetscInt->HYPRE_Int conversion (when the sizes differ), unhooks the
   borrowed AIJ pointers from the wrapper, and destroys the wrapper ParCSR,
   which at that point owns none of the matrix data */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (hoffd) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA so hypre does not free
     the AIJ-owned (or already freed) arrays */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  /* the off-diagonal column map is the AIJ garray; detach it as well */
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}
745 
746 /* calls RAP from BoomerAMG:
747    the resulting ParCSR will not own the column and row starts
748    It looks like we don't need to have the diagonal entries
749    ordered first in the rows of the diagonal part
750    for boomerAMGBuildCoarseOperator to work */
751 static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
752 {
753   HYPRE_Int P_owns_col_starts,R_owns_row_starts;
754 
755   PetscFunctionBegin;
756   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
757   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
758   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
759   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
760   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
761   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
762   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
763   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
764   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
765   PetscFunctionReturn(0);
766 }
767 
768 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
769 {
770   Mat                B;
771   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
772   PetscErrorCode     ierr;
773 
774   PetscFunctionBegin;
775   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
776   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
777   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
778   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
779   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
780   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
781   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
782   PetscFunctionReturn(0);
783 }
784 
785 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
786 {
787   PetscErrorCode ierr;
788 
789   PetscFunctionBegin;
790   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
791   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
792   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
793   PetscFunctionReturn(0);
794 }
795 
/* Numeric phase of C = P^T A P with A of type AIJ and P of type MATHYPRE;
   the output type of C was fixed at symbolic time (see MatPtAP_AIJ_HYPRE) */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;    /* temporary holder for the product, merged into C */
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* P must be MATHYPRE and store a HYPRE_PARCSR object */
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* wrap A as a ParCSR and run the RAP kernel with R = P = Pparcsr */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
823 
824 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
825 {
826   PetscErrorCode ierr;
827 
828   PetscFunctionBegin;
829   if (scall == MAT_INITIAL_MATRIX) {
830     const char *deft = MATAIJ;
831     char       type[256];
832     PetscBool  flg;
833 
834     ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
835     ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
836     ierr = PetscOptionsEnd();CHKERRQ(ierr);
837     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
838     ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
839     if (flg) {
840       ierr = MatSetType(*C,type);CHKERRQ(ierr);
841     } else {
842       ierr = MatSetType(*C,deft);CHKERRQ(ierr);
843     }
844     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
845     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
846   }
847   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
848   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
849   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
850   PetscFunctionReturn(0);
851 }
852 
/* Numeric phase of C = P^T A P when both A and P are MATHYPRE; the product is MATHYPRE */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;    /* temporary holder for the product, merged into C */
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* both operands must be MATHYPRE matrices storing HYPRE_PARCSR objects */
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  /* run RAP in hypre; the product's ownership is transferred to the temporary B */
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
880 
881 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
882 {
883   PetscErrorCode ierr;
884 
885   PetscFunctionBegin;
886   if (scall == MAT_INITIAL_MATRIX) {
887     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
888     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
889     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
890     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
891     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
892   }
893   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
894   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
895   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
896   PetscFunctionReturn(0);
897 }
898 
/* Computes *hAB = hA * hB by calling hypre_ParMatmul.
   hypre_ParMatmul creates the product on the communicator of hA
   (hypre_ParCSRMatrixCreate does not duplicate the communicator).
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part for this to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* push/pop the stack frame manually so errors inside hypre are attributed correctly */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
913 
914 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
915 {
916   Mat                D;
917   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
918   PetscErrorCode     ierr;
919 
920   PetscFunctionBegin;
921   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
922   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
923   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
924   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
925   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
926   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
927   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
928   PetscFunctionReturn(0);
929 }
930 
931 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
932 {
933   PetscErrorCode ierr;
934 
935   PetscFunctionBegin;
936   ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
937   ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
938   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
939   PetscFunctionReturn(0);
940 }
941 
/* Numeric phase of C = A*B when both operands are MATHYPRE; the product is MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;    /* temporary holder for the product, swapped into C */
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* both operands must be MATHYPRE matrices storing HYPRE_PARCSR objects */
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  /* HeaderReplace wiped C's ops; reinstall the numeric kernel for reuse */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}
971 
972 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
973 {
974   PetscErrorCode ierr;
975 
976   PetscFunctionBegin;
977   if (scall == MAT_INITIAL_MATRIX) {
978     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
979     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
980     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
981     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
982     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
983   }
984   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
985   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
986   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
987   PetscFunctionReturn(0);
988 }
989 
990 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
991 {
992   Mat                E;
993   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
994   PetscErrorCode     ierr;
995 
996   PetscFunctionBegin;
997   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
998   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
999   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
1000   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
1001   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
1002   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
1003   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
1004   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
1005   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
1006   PetscFunctionReturn(0);
1007 }
1008 
/* Symbolic phase of D = A^T B C with AIJ operands: only the AIJ container is
   created here; the computation happens in the numeric phase */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1018 
/* y = A^T x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1027 
/* y = A x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1036 
1037 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1038 {
1039   PetscErrorCode ierr;
1040 
1041   PetscFunctionBegin;
1042   if (y != z) {
1043     ierr = VecCopy(y,z);CHKERRQ(ierr);
1044   }
1045   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
1046   PetscFunctionReturn(0);
1047 }
1048 
1049 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1050 {
1051   PetscErrorCode ierr;
1052 
1053   PetscFunctionBegin;
1054   if (y != z) {
1055     ierr = VecCopy(y,z);CHKERRQ(ierr);
1056   }
1057   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
1058   PetscFunctionReturn(0);
1059 }
1060 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans.
   The matrix's work IJ vectors hA->x and hA->b have their data pointers
   temporarily replaced by the PETSc arrays of x and y, so no vector data is
   copied; the original (hypre-owned) pointers are restored before returning. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say; /* array pointers and their saved originals */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  if (trans) {
    /* for A^t the roles swap: y is written through hx, x is read through hy */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    /* restore the hypre-owned pointers saved in sax/say */
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1093 
/* Releases all resources held by a MATHYPRE matrix: the hypre work vectors and
   IJ matrix, the duplicated communicator, the stash, the scratch array, and
   the composed type-specific methods */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* work vectors are created lazily at assembly time, so they may be NULL */
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if we do not own the inner ParCSR, detach it so hypre does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  /* scratch space handed out by MatGetArray_HYPRE */
  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove the type-specific methods composed on this object */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1122 
/* Default setup: delegate to MatHYPRESetPreallocation with default row sizes */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1131 
/* Final assembly for MATHYPRE: drains the off-process stash into the matrix,
   calls HYPRE_IJMatrixAssemble, recreates the auxiliary matrix the assembly
   destroyed, and lazily creates the work vectors used by the matvec kernels */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  Vec                x,b;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* receive and insert the values stashed for this process by the others */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break; /* no more messages */

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitalize */
  {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* work vectors already created on a previous assembly: nothing left to do */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  /* array-less PETSc vectors only used to create the hypre IJ work vectors */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1193 
1194 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1195 {
1196   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1197   PetscErrorCode     ierr;
1198 
1199   PetscFunctionBegin;
1200   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1201 
1202   if (hA->size >= size) {
1203     *array = hA->array;
1204   } else {
1205     ierr = PetscFree(hA->array);CHKERRQ(ierr);
1206     hA->size = size;
1207     ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
1208     *array = hA->array;
1209   }
1210 
1211   hA->available = PETSC_FALSE;
1212   PetscFunctionReturn(0);
1213 }
1214 
1215 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1216 {
1217   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1218 
1219   PetscFunctionBegin;
1220   *array = NULL;
1221   hA->available = PETSC_TRUE;
1222   PetscFunctionReturn(0);
1223 }
1224 
/* MatSetValues for MATHYPRE: negative row/column indices are skipped,
   off-process rows are stashed (INSERT_VALUES path), and local values are
   passed to hypre one row at a time through a scratch buffer */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscScalar        *vals = (PetscScalar *)v;
  HYPRE_Complex      *sscr;    /* values cast to the hypre scalar type */
  PetscInt           *cscr[2]; /* [0] kept column indices, [1] their original positions */
  PetscInt           i,nzc;
  void               *array = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* scratch layout: [kept cols | original positions | casted values] */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  /* compress out negative (ignored) column indices, remembering positions */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) { /* no valid columns: nothing to insert */
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
        /* NOTE(review): the PetscInt* -> HYPRE_BigInt* casts assume matching
           integer sizes — confirm for 64-bit-index builds */
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc; /* advance to the next input row */
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc; /* advance to the next input row */
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1286 
/* Type-specific preallocation: creates (or validates) the HYPRE_IJMatrix and
   sets per-row diagonal/off-diagonal nonzero counts, then (re)initializes it */
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs   = A->rmap->rstart;
  re   = A->rmap->rend;
  cs   = A->cmap->rstart;
  ce   = A->cmap->rend;
  if (!hA->ij) {
    /* first call: create the IJ matrix over the local ranges */
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    /* an IJ matrix already exists: its local ranges must match the layouts */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  /* default per-row counts scale with the block size */
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    /* NOTE(review): this cast assumes sizeof(PetscInt) == sizeof(HYPRE_Int) —
       confirm for mixed 32/64-bit index builds */
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else {
      honnz = (HYPRE_Int*)onnz;
    }
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption, and we instead set this flag to 1
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  } else {
    /* sequential run: no off-diagonal part */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  /* free the count arrays only when we allocated them ourselves */
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }

  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1362 
1363 /*@C
1364    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1365 
1366    Collective on Mat
1367 
1368    Input Parameters:
1369 +  A - the matrix
1370 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1371           (same value is used for all local rows)
1372 .  dnnz - array containing the number of nonzeros in the various rows of the
1373           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1375           The size of this array is equal to the number of local rows, i.e 'm'.
1376           For matrices that will be factored, you must leave room for (and set)
1377           the diagonal entry even if it is zero.
1378 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1379           submatrix (same value is used for all local rows).
1380 -  onnz - array containing the number of nonzeros in the various rows of the
1381           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1383           structure. The size of this array is equal to the number
1384           of local rows, i.e 'm'.
1385 
1386    Notes:
1387     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1388 
1389    Level: intermediate
1390 
1391 .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1392 @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the type-specific implementation, if composed (no-op otherwise) */
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1403 
1404 /*
1405    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1406 
1407    Collective
1408 
1409    Input Parameters:
1410 +  parcsr   - the pointer to the hypre_ParCSRMatrix
1411 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1412 -  copymode - PETSc copying options
1413 
1414    Output Parameter:
1415 .  A  - the matrix
1416 
1417    Level: intermediate
1418 
.seealso: MATHYPRE, PetscCopyMode
1420 */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  /* decide the requested output type; MATSEQAIJ/MATMPIAIJ/MATAIJ are all treated as AIJ */
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: ownership ranges are half-open [rstart,rend) */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE; for AIJ/IS output it is converted below */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix (hypre uses closed ranges, hence rend-1/cend-1) */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: a MATHYPRE result with PETSC_COPY_VALUES
     gets a structural clone plus a value copy, and from then on owns the clone */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object directly inside the IJ matrix (bypasses hypre assembly) */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag so HYPRE_IJMatrixInitialize does not wipe the data */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    /* when the ParCSR does not own its starts arrays, borrow them from the
       PETSc layouts of T (these stay valid for the lifetime of T) */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    /* prevent from freeing the pointer: caller retains ownership with PETSC_USE_POINTER */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1533 
1534 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1535 {
1536   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1537   HYPRE_Int type;
1538 
1539   PetscFunctionBegin;
1540   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1541   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1542   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1543   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1544   PetscFunctionReturn(0);
1545 }
1546 
1547 /*
1548    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1549 
1550    Not collective
1551 
   Input Parameter:
.  A  - the MATHYPRE object
1554 
1555    Output Parameter:
1556 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1557 
1558    Level: intermediate
1559 
.seealso: MatHYPRE, MatCreateFromParCSR()
1561 */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the implementation composed on A (MatHYPREGetParCSR_HYPRE for MATHYPRE);
     PetscUseMethod errors out if A's type does not provide "MatHYPREGetParCSR_C" */
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1572 
1573 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1574 {
1575   hypre_ParCSRMatrix *parcsr;
1576   hypre_CSRMatrix    *ha;
1577   PetscInt           rst;
1578   PetscErrorCode     ierr;
1579 
1580   PetscFunctionBegin;
1581   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1582   ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
1583   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1584   if (missing) *missing = PETSC_FALSE;
1585   if (dd) *dd = -1;
1586   ha = hypre_ParCSRMatrixDiag(parcsr);
1587   if (ha) {
1588     PetscInt  size,i;
1589     HYPRE_Int *ii,*jj;
1590 
1591     size = hypre_CSRMatrixNumRows(ha);
1592     ii   = hypre_CSRMatrixI(ha);
1593     jj   = hypre_CSRMatrixJ(ha);
1594     for (i = 0; i < size; i++) {
1595       PetscInt  j;
1596       PetscBool found = PETSC_FALSE;
1597 
1598       for (j = ii[i]; j < ii[i+1] && !found; j++)
1599         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1600 
1601       if (!found) {
1602         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1603         if (missing) *missing = PETSC_TRUE;
1604         if (dd) *dd = i+rst;
1605         PetscFunctionReturn(0);
1606       }
1607     }
1608     if (!size) {
1609       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1610       if (missing) *missing = PETSC_TRUE;
1611       if (dd) *dd = rst;
1612     }
1613   } else {
1614     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1615     if (missing) *missing = PETSC_TRUE;
1616     if (dd) *dd = rst;
1617   }
1618   PetscFunctionReturn(0);
1619 }
1620 
1621 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1622 {
1623   hypre_ParCSRMatrix *parcsr;
1624   hypre_CSRMatrix    *ha;
1625   PetscErrorCode     ierr;
1626   HYPRE_Complex      hs;
1627 
1628   PetscFunctionBegin;
1629   ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr);
1630   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1631   /* diagonal part */
1632   ha = hypre_ParCSRMatrixDiag(parcsr);
1633   if (ha) {
1634     PetscInt      size,i;
1635     HYPRE_Int     *ii;
1636     HYPRE_Complex *a;
1637 
1638     size = hypre_CSRMatrixNumRows(ha);
1639     a    = hypre_CSRMatrixData(ha);
1640     ii   = hypre_CSRMatrixI(ha);
1641     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1642   }
1643   /* offdiagonal part */
1644   ha = hypre_ParCSRMatrixOffd(parcsr);
1645   if (ha) {
1646     PetscInt      size,i;
1647     HYPRE_Int     *ii;
1648     HYPRE_Complex *a;
1649 
1650     size = hypre_CSRMatrixNumRows(ha);
1651     a    = hypre_CSRMatrixData(ha);
1652     ii   = hypre_CSRMatrixI(ha);
1653     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1654   }
1655   PetscFunctionReturn(0);
1656 }
1657 
1658 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1659 {
1660   hypre_ParCSRMatrix *parcsr;
1661   HYPRE_Int          *lrows;
1662   PetscInt           rst,ren,i;
1663   PetscErrorCode     ierr;
1664 
1665   PetscFunctionBegin;
1666   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1667   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1668   ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
1669   ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1670   for (i=0;i<numRows;i++) {
1671     if (rows[i] < rst || rows[i] >= ren)
1672       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1673     lrows[i] = rows[i] - rst;
1674   }
1675   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1676   ierr = PetscFree(lrows);CHKERRQ(ierr);
1677   PetscFunctionReturn(0);
1678 }
1679 
1680 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1681 {
1682   PetscErrorCode      ierr;
1683 
1684   PetscFunctionBegin;
1685   if (ha) {
1686     HYPRE_Int     *ii, size;
1687     HYPRE_Complex *a;
1688 
1689     size = hypre_CSRMatrixNumRows(ha);
1690     a    = hypre_CSRMatrixData(ha);
1691     ii   = hypre_CSRMatrixI(ha);
1692 
1693     if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);}
1694   }
1695   PetscFunctionReturn(0);
1696 }
1697 
1698 PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1699 {
1700   hypre_ParCSRMatrix  *parcsr;
1701   PetscErrorCode      ierr;
1702 
1703   PetscFunctionBegin;
1704   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1705   /* diagonal part */
1706   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
1707   /* off-diagonal part */
1708   ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
1709   PetscFunctionReturn(0);
1710 }
1711 
1712 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
1713 {
1714   PetscInt        ii;
1715   HYPRE_Int       *i, *j;
1716   HYPRE_Complex   *a;
1717 
1718   PetscFunctionBegin;
1719   if (!hA) PetscFunctionReturn(0);
1720 
1721   i = hypre_CSRMatrixI(hA);
1722   j = hypre_CSRMatrixJ(hA);
1723   a = hypre_CSRMatrixData(hA);
1724 
1725   for (ii = 0; ii < N; ii++) {
1726     HYPRE_Int jj, ibeg, iend, irow;
1727 
1728     irow = rows[ii];
1729     ibeg = i[irow];
1730     iend = i[irow+1];
1731     for (jj = ibeg; jj < iend; jj++)
1732       if (j[jj] == irow) a[jj] = diag;
1733       else a[jj] = 0.0;
1734    }
1735    PetscFunctionReturn(0);
1736 }
1737 
1738 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1739 {
1740   hypre_ParCSRMatrix  *parcsr;
1741   PetscInt            *lrows,len;
1742   HYPRE_Complex       hdiag;
1743   PetscErrorCode      ierr;
1744 
1745   PetscFunctionBegin;
1746   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
1747   ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr);
1748   /* retrieve the internal matrix */
1749   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1750   /* get locally owned rows */
1751   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1752   /* zero diagonal part */
1753   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr);
1754   /* zero off-diagonal part */
1755   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);
1756 
1757   ierr = PetscFree(lrows);CHKERRQ(ierr);
1758   PetscFunctionReturn(0);
1759 }
1760 
1761 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1762 {
1763   PetscErrorCode ierr;
1764 
1765   PetscFunctionBegin;
1766   if (mat->nooffprocentries) PetscFunctionReturn(0);
1767 
1768   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
1769   PetscFunctionReturn(0);
1770 }
1771 
1772 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1773 {
1774   hypre_ParCSRMatrix  *parcsr;
1775   HYPRE_Int           hnz;
1776   PetscErrorCode      ierr;
1777 
1778   PetscFunctionBegin;
1779   /* retrieve the internal matrix */
1780   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1781   /* call HYPRE API */
1782   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
1783   if (nz) *nz = (PetscInt)hnz;
1784   PetscFunctionReturn(0);
1785 }
1786 
1787 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1788 {
1789   hypre_ParCSRMatrix  *parcsr;
1790   HYPRE_Int           hnz;
1791   PetscErrorCode      ierr;
1792 
1793   PetscFunctionBegin;
1794   /* retrieve the internal matrix */
1795   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1796   /* call HYPRE API */
1797   hnz = nz ? (HYPRE_Int)(*nz) : 0;
1798   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
1799   PetscFunctionReturn(0);
1800 }
1801 
1802 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1803 {
1804   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1805   PetscInt  i;
1806 
1807   PetscFunctionBegin;
1808   if (!m || !n) PetscFunctionReturn(0);
1809   /* Ignore negative row indices
1810    * And negative column indices should be automatically ignored in hypre
1811    * */
1812   for (i=0; i<m; i++) {
1813     if (idxm[i] >= 0) {
1814       HYPRE_Int hn = (HYPRE_Int)n;
1815       PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
1816     }
1817   }
1818   PetscFunctionReturn(0);
1819 }
1820 
1821 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1822 {
1823   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1824 
1825   PetscFunctionBegin;
1826   switch (op) {
1827   case MAT_NO_OFF_PROC_ENTRIES:
1828     if (flg) {
1829       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1830     }
1831     break;
1832   default:
1833     break;
1834   }
1835   PetscFunctionReturn(0);
1836 }
1837 
1838 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1839 {
1840   hypre_ParCSRMatrix *parcsr;
1841   PetscErrorCode     ierr;
1842   Mat                B;
1843   PetscViewerFormat  format;
1844   PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;
1845 
1846   PetscFunctionBegin;
1847   ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
1848   if (format != PETSC_VIEWER_NATIVE) {
1849     ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1850     ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
1851     ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr);
1852     if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr);
1853     ierr = (*mview)(B,view);CHKERRQ(ierr);
1854     ierr = MatDestroy(&B);CHKERRQ(ierr);
1855   } else {
1856     Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
1857     PetscMPIInt size;
1858     PetscBool   isascii;
1859     const char *filename;
1860 
1861     /* HYPRE uses only text files */
1862     ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
1863     if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1864     ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr);
1865     PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
1866     ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr);
1867     if (size > 1) {
1868       ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr);
1869     } else {
1870       ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr);
1871     }
1872   }
1873   PetscFunctionReturn(0);
1874 }
1875 
1876 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
1877 {
1878   hypre_ParCSRMatrix *parcsr;
1879   PetscErrorCode     ierr;
1880   PetscCopyMode      cpmode;
1881 
1882   PetscFunctionBegin;
1883   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1884   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
1885     parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
1886     cpmode = PETSC_OWN_POINTER;
1887   } else {
1888     cpmode = PETSC_COPY_VALUES;
1889   }
1890   ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr);
1891   PetscFunctionReturn(0);
1892 }
1893 
1894 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
1895 {
1896   hypre_ParCSRMatrix *acsr,*bcsr;
1897   PetscErrorCode     ierr;
1898 
1899   PetscFunctionBegin;
1900   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
1901     ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr);
1902     ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr);
1903     PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
1904     ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1905     ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1906   } else {
1907     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1908   }
1909   PetscFunctionReturn(0);
1910 }
1911 
1912 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
1913 {
1914   hypre_ParCSRMatrix *parcsr;
1915   hypre_CSRMatrix    *dmat;
1916   HYPRE_Complex      *a;
1917   HYPRE_Complex      *data = NULL;
1918   HYPRE_Int          *diag = NULL;
1919   PetscInt           i;
1920   PetscBool          cong;
1921   PetscErrorCode     ierr;
1922 
1923   PetscFunctionBegin;
1924   ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
1925   if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
1926 #if defined(PETSC_USE_DEBUG)
1927   {
1928     PetscBool miss;
1929     ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
1930     if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
1931   }
1932 #endif
1933   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1934   dmat = hypre_ParCSRMatrixDiag(parcsr);
1935   if (dmat) {
1936     /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
1937     ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
1938     diag = hypre_CSRMatrixI(dmat);
1939     data = hypre_CSRMatrixData(dmat);
1940     for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
1941     ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
1942   }
1943   PetscFunctionReturn(0);
1944 }
1945 
1946 #include <petscblaslapack.h>
1947 
1948 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
1949 {
1950   PetscErrorCode ierr;
1951 
1952   PetscFunctionBegin;
1953   if (str == SAME_NONZERO_PATTERN) {
1954     hypre_ParCSRMatrix *x,*y;
1955     hypre_CSRMatrix    *xloc,*yloc;
1956     PetscInt           xnnz,ynnz;
1957     HYPRE_Complex      *xarr,*yarr;
1958     PetscBLASInt       one=1,bnz;
1959 
1960     ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
1961     ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);
1962 
1963     /* diagonal block */
1964     xloc = hypre_ParCSRMatrixDiag(x);
1965     yloc = hypre_ParCSRMatrixDiag(y);
1966     xnnz = 0;
1967     ynnz = 0;
1968     xarr = NULL;
1969     yarr = NULL;
1970     if (xloc) {
1971       xarr = hypre_CSRMatrixData(xloc);
1972       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1973     }
1974     if (yloc) {
1975       yarr = hypre_CSRMatrixData(yloc);
1976       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1977     }
1978     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
1979     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
1980     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
1981 
1982     /* off-diagonal block */
1983     xloc = hypre_ParCSRMatrixOffd(x);
1984     yloc = hypre_ParCSRMatrixOffd(y);
1985     xnnz = 0;
1986     ynnz = 0;
1987     xarr = NULL;
1988     yarr = NULL;
1989     if (xloc) {
1990       xarr = hypre_CSRMatrixData(xloc);
1991       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1992     }
1993     if (yloc) {
1994       yarr = hypre_CSRMatrixData(yloc);
1995       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1996     }
1997     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
1998     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
1999     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2000   } else if (str == SUBSET_NONZERO_PATTERN) {
2001     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2002   } else {
2003     Mat B;
2004 
2005     ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr);
2006     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2007     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2008   }
2009   PetscFunctionReturn(0);
2010 }
2011 
2012 /*MC
2013    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2014           based on the hypre IJ interface.
2015 
2016    Level: intermediate
2017 
2018 .seealso: MatCreate()
2019 M*/
2020 
/* Constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
   operation table, and composes the conversion/query methods on the object */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;  /* by default the wrapped hypre data is destroyed with B */
  hB->available  = PETSC_TRUE;
  hB->size       = 0;
  hB->array      = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  /* start from a clean op table, then fill in what MATHYPRE supports */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult             = MatMult_HYPRE;
  B->ops->multtranspose    = MatMultTranspose_HYPRE;
  B->ops->multadd          = MatMultAdd_HYPRE;
  B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
  B->ops->setup            = MatSetUp_HYPRE;
  B->ops->destroy          = MatDestroy_HYPRE;
  B->ops->assemblyend      = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin    = MatAssemblyBegin_HYPRE;
  B->ops->ptap             = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult          = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues        = MatSetValues_HYPRE;
  B->ops->missingdiagonal  = MatMissingDiagonal_HYPRE;
  B->ops->scale            = MatScale_HYPRE;
  B->ops->zerorowscolumns  = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries      = MatZeroEntries_HYPRE;
  B->ops->zerorows         = MatZeroRows_HYPRE;
  B->ops->getrow           = MatGetRow_HYPRE;
  B->ops->restorerow       = MatRestoreRow_HYPRE;
  B->ops->getvalues        = MatGetValues_HYPRE;
  B->ops->setoption        = MatSetOption_HYPRE;
  B->ops->duplicate        = MatDuplicate_HYPRE;
  B->ops->copy             = MatCopy_HYPRE;
  B->ops->view             = MatView_HYPRE;
  B->ops->getdiagonal      = MatGetDiagonal_HYPRE;
  B->ops->axpy             = MatAXPY_HYPRE;

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* private communicator duplicate used for all hypre calls on this matrix */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2078 
2079 static PetscErrorCode hypre_array_destroy(void *ptr)
2080 {
2081    PetscFunctionBegin;
2082    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
2083    PetscFunctionReturn(0);
2084 }
2085