xref: /petsc/src/mat/impls/hypre/mhypre.c (revision ef0bb6c736604ce380bf8bea4ebd4a7bda431d97)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscpkg_version.h>
7 #include <petsc/private/petschypre.h>
8 #include <petscmathypre.h>
9 #include <petsc/private/matimpl.h>
10 #include <../src/mat/impls/hypre/mhypre.h>
11 #include <../src/mat/impls/aij/mpi/mpiaij.h>
12 #include <../src/vec/vec/impls/hypre/vhyp.h>
13 #include <HYPRE.h>
14 #include <HYPRE_utilities.h>
15 #include <_hypre_parcsr_ls.h>
16 #include <_hypre_sstruct_ls.h>
17 
18 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
19 #define  hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A)
20 #endif
21 
22 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
23 
24 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
25 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
28 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
29 static PetscErrorCode hypre_array_destroy(void*);
30 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
31 
32 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
33 {
34   PetscErrorCode ierr;
35   PetscInt       i,n_d,n_o;
36   const PetscInt *ia_d,*ia_o;
37   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
38   HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;
39 
40   PetscFunctionBegin;
41   if (A_d) { /* determine number of nonzero entries in local diagonal part */
42     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
43     if (done_d) {
44       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
45       for (i=0; i<n_d; i++) {
46         nnz_d[i] = ia_d[i+1] - ia_d[i];
47       }
48     }
49     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
50   }
51   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
52     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
53     if (done_o) {
54       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
55       for (i=0; i<n_o; i++) {
56         nnz_o[i] = ia_o[i+1] - ia_o[i];
57       }
58     }
59     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
60   }
61   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
62     if (!done_o) { /* only diagonal part */
63       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
64       for (i=0; i<n_d; i++) {
65         nnz_o[i] = 0;
66       }
67     }
68 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
69     { /* If we don't do this, the columns of the matrix will be all zeros! */
70       hypre_AuxParCSRMatrix *aux_matrix;
71       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
72       hypre_AuxParCSRMatrixDestroy(aux_matrix);
73       hypre_IJMatrixTranslator(ij) = NULL;
74       PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
75       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
76       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
77     }
78 #else
79     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
80 #endif
81     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
82     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
83   }
84   PetscFunctionReturn(0);
85 }
86 
87 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
88 {
89   PetscErrorCode ierr;
90   PetscInt       rstart,rend,cstart,cend;
91 
92   PetscFunctionBegin;
93   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
94   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
95   rstart = A->rmap->rstart;
96   rend   = A->rmap->rend;
97   cstart = A->cmap->rstart;
98   cend   = A->cmap->rend;
99   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
100   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
101   {
102     PetscBool      same;
103     Mat            A_d,A_o;
104     const PetscInt *colmap;
105     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
106     if (same) {
107       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
108       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
109       PetscFunctionReturn(0);
110     }
111     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
112     if (same) {
113       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
114       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
115       PetscFunctionReturn(0);
116     }
117     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
118     if (same) {
119       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
120       PetscFunctionReturn(0);
121     }
122     ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
123     if (same) {
124       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
125       PetscFunctionReturn(0);
126     }
127   }
128   PetscFunctionReturn(0);
129 }
130 
131 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
132 {
133   PetscErrorCode    ierr;
134   PetscInt          i,rstart,rend,ncols,nr,nc;
135   const PetscScalar *values;
136   const PetscInt    *cols;
137   PetscBool         flg;
138 
139   PetscFunctionBegin;
140   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
141   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
142   if (flg && nr == nc) {
143     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
144     PetscFunctionReturn(0);
145   }
146   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
147   if (flg) {
148     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
149     PetscFunctionReturn(0);
150   }
151 
152   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
153   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
154   for (i=rstart; i<rend; i++) {
155     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
156     if (ncols) {
157       HYPRE_Int nc = (HYPRE_Int)ncols;
158 
159       if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
160       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
161     }
162     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
163   }
164   PetscFunctionReturn(0);
165 }
166 
/*
   Fast copy of a SeqAIJ matrix into a preallocated hypre IJ matrix: the CSR
   arrays are written directly into the hypre diagonal block, bypassing
   HYPRE_IJMatrixSetValues. Assumes ij was preallocated with a matching
   nonzero structure (see MatHYPRE_IJMatrixPreallocate).
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* can index arrays be memcpy'd directly? */

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr);
    ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr);
  } else {
    PetscInt i;

    /* integer widths differ: convert element by element */
    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);

  /* the CSR arrays are fully filled in: tell hypre not to use the auxiliary (row-based) assembly data */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
201 
/*
   Fast copy of a square MPIAIJ matrix into a preallocated hypre IJ matrix:
   the diagonal and off-diagonal CSR blocks are written directly into the
   hypre ParCSR data structures; column indices are mapped to the numbering
   hypre expects.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* can index arrays be memcpy'd directly? */

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  /* for hypre >= 2.16 the diagonal indices are copied unshifted (local numbering) */
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);
  if (sameint) {
    ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
  jj  = (PetscInt*) hoffd->big_j;
#else
  jj  = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  /* garray maps local off-diagonal column indices to global indices */
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

  ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr);

  /* the CSR arrays are fully filled in: tell hypre not to use the auxiliary (row-based) assembly data */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
267 
/*
   Converts a MATHYPRE matrix A into a MATIS matrix *B by merging the hypre
   diagonal and off-diagonal CSR blocks into one local SeqAIJ matrix; the
   local-to-global maps are built from the hypre row/column partitioning and
   col_map_offd. Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX (reuses the
   CSR arrays of the existing local matrix) and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* gather sizes and CSR pointers of the two local hypre blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total nonzeros of the merged local matrix */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* local columns: first the diagonal block (contiguous from stc), then the off-diagonal ones */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse the CSR arrays of the existing local matrix, after sanity checks */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: interleave diag and offd rows, offd columns shifted by dc */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = (PetscScalar*)aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    /* keep each merged row sorted by (local) column index */
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad the row pointers of any remaining (empty) rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
375 
/*
   Converts an AIJ matrix A to MATHYPRE. For MAT_REUSE_MATRIX the hypre IJ
   matrix inside *B is destroyed and recreated; for MAT_INITIAL_MATRIX and
   MAT_INPLACE_MATRIX a fresh MATHYPRE matrix is created first.
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)(M->data);
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    /* NOTE(review): for MAT_INPLACE_MATRIX the caller passes B == &A
       (MatConvert convention), so *B below refers to the replaced A -- confirm */
    ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr);
  }
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
409 
/*
   Converts a MATHYPRE matrix A to (Seq|MPI)AIJ. The hypre diagonal and
   off-diagonal CSR blocks are copied (or, for MAT_INPLACE_MATRIX with
   matching integer sizes, aliased) into PETSc CSR arrays; each row is then
   sorted by column index as PETSc requires. For MAT_INPLACE_MATRIX the
   hypre arrays whose ownership moved to PETSc are registered in containers
   so they are later freed with hypre_array_destroy.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* can hypre index arrays be aliased/memcpy'd? */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the CSR arrays for the diagonal block: fresh allocation, reuse of
     the existing AIJ arrays, or (inplace) aliasing of the hypre arrays */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
      ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    } else {
      /* alias the hypre index arrays: ownership is transferred to PETSc below */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  if (!sameint) {
    for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr);
    ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr);
  }
  ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr);
  /* sort each row by column index, permuting the values accordingly */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* same three-way treatment for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
        ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (!sameint) {
      for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
    } else {
      ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr);
    }
    /* map the local off-diagonal column indices to global numbering via col_map_offd */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: the new matrix owns the CSR arrays we allocated above */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: the new matrix owns the CSR arrays we allocated above */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt nh;
    void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when !sameint only the data arrays (da,oa) came from hypre; the index arrays were PetscMalloc'd */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
621 
/*
   Wraps a (Seq|MPI)AIJ matrix A in a temporary hypre ParCSR matrix *hA
   without copying the values: the hypre CSR blocks alias the PETSc arrays
   (indices are converted only when PetscInt and HYPRE_Int differ).
   Must be matched by a call to MatAIJRestoreParCSR_Private.
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* can index arrays be aliased directly? */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    /* sequential: the whole matrix is the diagonal block */
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* with assumed partition, hypre only wants this rank's [start,end) pair */
    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
  /* the starts arrays belong to the PETSc layouts: hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (sameint) { /* reuse CSR pointers */
    hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
    hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
  } else { /* malloc CSR pointers */
    HYPRE_Int *hi,*hj;

    ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr);
    for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
    for (i = 0; i < dnnz; i++)         hj[i] = (HYPRE_Int)(diag->j[i]);
    hypre_CSRMatrixI(hdiag) = hi;
    hypre_CSRMatrixJ(hdiag) = hj;
  }
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0); /* values are aliased, not owned */

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (sameint) { /* reuse CSR pointers */
      hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
      hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
    } else { /* malloc CSR pointers */
      HYPRE_Int *hi,*hj;

      ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr);
      for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
      for (i = 0; i < onnz; i++)         hj[i] = (HYPRE_Int)(offd->j[i]);
      hypre_CSRMatrixI(hoffd) = hi;
      hypre_CSRMatrixJ(hoffd) = hj;
    }
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    /* NOTE(review): aliasing garray assumes PetscInt and HYPRE_BigInt have the same size -- confirm */
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}
714 
/*
   Destroys the temporary ParCSR wrapper created by MatAIJGetParCSR_Private.
   Index arrays allocated by PETSc (only when integer sizes differ) are
   freed; all aliased pointers are nulled before the hypre destroy so the
   PETSc matrix keeps its data. Sets *hA to NULL.
*/
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (hoffd) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA so hypre does not free PETSc-owned arrays */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}
749 
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work.
   Computes *hRAP = hR^t * hA * hP (with hR == hP this is PtAP). */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember the ownership flags so they can be restored after the hypre call */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}
771 
772 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
773 {
774   Mat                B;
775   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
776   PetscErrorCode     ierr;
777 
778   PetscFunctionBegin;
779   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
780   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
781   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
782   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
783   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
784   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
785   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
786   PetscFunctionReturn(0);
787 }
788 
789 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
790 {
791   PetscErrorCode ierr;
792 
793   PetscFunctionBegin;
794   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
795   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
796   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
797   PetscFunctionReturn(0);
798 }
799 
800 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
801 {
802   Mat                B;
803   Mat_HYPRE          *hP;
804   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
805   HYPRE_Int          type;
806   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
807   PetscBool          ishypre;
808   PetscErrorCode     ierr;
809 
810   PetscFunctionBegin;
811   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
812   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
813   hP = (Mat_HYPRE*)P->data;
814   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
815   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
816   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
817 
818   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
819   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
820   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
821 
822   /* create temporary matrix and merge to C */
823   ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
824   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
825   PetscFunctionReturn(0);
826 }
827 
/* PtAP driver for A in AIJ format and P of type MATHYPRE.
   On the initial call the output matrix type may be selected at runtime via
   -matptap_hypre_outtype (defaults to AIJ); fill is unused because hypre
   performs its own allocation in the numeric phase. */
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    /* let the user override the output matrix type from the options database */
    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  /* all the actual work happens in the numeric phase */
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
856 
857 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
858 {
859   Mat                B;
860   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
861   Mat_HYPRE          *hA,*hP;
862   PetscBool          ishypre;
863   HYPRE_Int          type;
864   PetscErrorCode     ierr;
865 
866   PetscFunctionBegin;
867   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
868   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
869   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
870   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
871   hA = (Mat_HYPRE*)A->data;
872   hP = (Mat_HYPRE*)P->data;
873   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
874   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
875   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
876   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
877   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
878   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
879   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
880   ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
881   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 
885 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
886 {
887   PetscErrorCode ierr;
888 
889   PetscFunctionBegin;
890   if (scall == MAT_INITIAL_MATRIX) {
891     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
892     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
893     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
894     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
895     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
896   }
897   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
898   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
899   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
900   PetscFunctionReturn(0);
901 }
902 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work.
   Computes *hAB = hA * hB; the caller owns the returned matrix. */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* manual stack push/pop since hypre_ParMatmul returns a pointer, not an error code */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
917 
918 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
919 {
920   Mat                D;
921   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
922   PetscErrorCode     ierr;
923 
924   PetscFunctionBegin;
925   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
926   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
927   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
928   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
929   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
930   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
931   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
936 {
937   PetscErrorCode ierr;
938 
939   PetscFunctionBegin;
940   ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
941   ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
942   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
943   PetscFunctionReturn(0);
944 }
945 
946 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
947 {
948   Mat                D;
949   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
950   Mat_HYPRE          *hA,*hB;
951   PetscBool          ishypre;
952   HYPRE_Int          type;
953   PetscErrorCode     ierr;
954 
955   PetscFunctionBegin;
956   ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
957   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
958   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
959   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
960   hA = (Mat_HYPRE*)A->data;
961   hB = (Mat_HYPRE*)B->data;
962   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
963   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
964   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
965   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
966   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
967   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
968   ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
969   ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
970   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
971   ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
972   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
973   PetscFunctionReturn(0);
974 }
975 
976 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
977 {
978   PetscErrorCode ierr;
979 
980   PetscFunctionBegin;
981   if (scall == MAT_INITIAL_MATRIX) {
982     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
983     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
984     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
985     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
986     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
987   }
988   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
989   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
990   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
991   PetscFunctionReturn(0);
992 }
993 
994 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
995 {
996   Mat                E;
997   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
998   PetscErrorCode     ierr;
999 
1000   PetscFunctionBegin;
1001   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
1002   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
1003   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
1004   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
1005   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
1006   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
1007   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
1008   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
1009   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
1010   PetscFunctionReturn(0);
1011 }
1012 
1013 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
1014 {
1015   PetscErrorCode ierr;
1016 
1017   PetscFunctionBegin;
1018   ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
1019   ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
1020   PetscFunctionReturn(0);
1021 }
1022 
1023 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1024 {
1025   PetscErrorCode ierr;
1026 
1027   PetscFunctionBegin;
1028   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
1029   PetscFunctionReturn(0);
1030 }
1031 
1032 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1033 {
1034   PetscErrorCode ierr;
1035 
1036   PetscFunctionBegin;
1037   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
1038   PetscFunctionReturn(0);
1039 }
1040 
1041 static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1042 {
1043   PetscErrorCode ierr;
1044 
1045   PetscFunctionBegin;
1046   if (y != z) {
1047     ierr = VecCopy(y,z);CHKERRQ(ierr);
1048   }
1049   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
1050   PetscFunctionReturn(0);
1051 }
1052 
1053 static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1054 {
1055   PetscErrorCode ierr;
1056 
1057   PetscFunctionBegin;
1058   if (y != z) {
1059     ierr = VecCopy(y,z);CHKERRQ(ierr);
1060   }
1061   ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
1062   PetscFunctionReturn(0);
1063 }
1064 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans.
   hA->x and hA->b are persistent hypre vectors created in MatAssemblyEnd_HYPRE
   with column and row layout respectively; their data pointers are temporarily
   swapped with the PETSc Vec arrays so no copy is performed.
   NOTE(review): the casts between PetscScalar* and HYPRE_Complex* assume the
   two types are layout-compatible -- presumably guaranteed by the PETSc/hypre
   configure checks, confirm in petschypre.h */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say; /* sax/say save the original hypre pointers */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  if (trans) {
    /* for A^t the input has row layout and the output column layout, so the
       roles of hA->x (column layout) and hA->b (row layout) are exchanged */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1097 
/* Destroys a MATHYPRE matrix: releases the hypre IJ matrix and work vectors,
   the duplicated communicator, the assembly stash, the scratch array used by
   MatSetValues_HYPRE, and all composed method functions. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if the ParCSR object is not ours to free (e.g. it came from
       MatCreateFromParCSR with borrowed ownership), detach it first so
       HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  /* scratch workspace managed by MatGetArray_HYPRE/MatRestoreArray_HYPRE */
  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove all composed methods installed at creation/conversion time */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaijmkl_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1127 
1128 static PetscErrorCode MatSetUp_HYPRE(Mat A)
1129 {
1130   PetscErrorCode ierr;
1131 
1132   PetscFunctionBegin;
1133   ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
1134   PetscFunctionReturn(0);
1135 }
1136 
/* Completes assembly of a MATHYPRE matrix: drains the off-process value
   stash, calls HYPRE_IJMatrixAssemble, recreates hypre's auxiliary matrix
   (destroyed by assembly) so further MatSetValues calls work, and lazily
   creates the work vectors used by the matvec kernels. */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  Vec                x,b;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  /* replay values that were set for rows owned by other processes */
  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitalize */
  {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* work vectors for the matvec kernels are created once, on first assembly */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  /* array-less Vecs: the actual data pointers are swapped in at matvec time */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1198 
1199 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1200 {
1201   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1202   PetscErrorCode     ierr;
1203 
1204   PetscFunctionBegin;
1205   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1206 
1207   if (hA->size >= size) {
1208     *array = hA->array;
1209   } else {
1210     ierr = PetscFree(hA->array);CHKERRQ(ierr);
1211     hA->size = size;
1212     ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
1213     *array = hA->array;
1214   }
1215 
1216   hA->available = PETSC_FALSE;
1217   PetscFunctionReturn(0);
1218 }
1219 
1220 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1221 {
1222   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1223 
1224   PetscFunctionBegin;
1225   *array = NULL;
1226   hA->available = PETSC_TRUE;
1227   PetscFunctionReturn(0);
1228 }
1229 
/* MatSetValues for MATHYPRE: filters out negative (masked) row/column
   indices, converts values to HYPRE_Complex, and forwards local rows directly
   to the hypre IJ interface; with INSERT_VALUES, off-process rows are routed
   through the stash and replayed in MatAssemblyEnd_HYPRE.
   NOTE(review): the (HYPRE_BigInt*) casts of PetscInt arrays assume
   sizeof(PetscInt) == sizeof(HYPRE_BigInt) -- presumably enforced by the
   PETSc/hypre configure checks, confirm */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscScalar        *vals = (PetscScalar *)v;
  HYPRE_Complex      *sscr;    /* per-row values converted to HYPRE_Complex */
  PetscInt           *cscr[2]; /* [0] = surviving column indices, [1] = their positions in cols[] */
  PetscInt           i,nzc;
  void               *array = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* one scratch block: two index arrays of length nc plus nc*nr complex values */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  /* compact away negative column indices (PETSc convention: ignore them) */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        /* guard against HYPRE_Int being narrower than PetscInt */
        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc; /* advance to the next row of the input value block */
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1291 
/* Type-specific implementation of MatHYPRESetPreallocation: creates (or
   validates) the underlying HYPRE_IJMatrix and passes per-row nonzero counts
   to hypre. Scalar defaults expand to 10*bs nonzeros per row in each part.
   NOTE(review): the (HYPRE_Int*) casts of the dnnz/onnz arrays assume
   sizeof(PetscInt) == sizeof(HYPRE_Int) -- presumably enforced by the
   PETSc/hypre configure checks, confirm */
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs   = A->rmap->rstart;
  re   = A->rmap->rend;
  cs   = A->cmap->rstart;
  ce   = A->cmap->rend;
  if (!hA->ij) {
    /* first call: create the IJ matrix (hypre ranges are inclusive, hence -1) */
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    /* the IJ matrix already exists: its local layout must match PETSc's */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  /* expand scalar counts to per-row arrays; reuse the caller's arrays when given */
  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else {
      honnz = (HYPRE_Int*)onnz;
    }
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption, and we instead set this flag to 1
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  } else {
    /* sequential case: the off-diagonal part is empty */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  /* free the expanded arrays only if we allocated them above */
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }

  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1367 
1368 /*@C
1369    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1370 
1371    Collective on Mat
1372 
1373    Input Parameters:
1374 +  A - the matrix
1375 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1376           (same value is used for all local rows)
1377 .  dnnz - array containing the number of nonzeros in the various rows of the
1378           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1380           The size of this array is equal to the number of local rows, i.e 'm'.
1381           For matrices that will be factored, you must leave room for (and set)
1382           the diagonal entry even if it is zero.
1383 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1384           submatrix (same value is used for all local rows).
1385 -  onnz - array containing the number of nonzeros in the various rows of the
1386           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1388           structure. The size of this array is equal to the number
1389           of local rows, i.e 'm'.
1390 
1391    Notes:
1392     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1393 
1394    Level: intermediate
1395 
1396 .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1397 @*/
1398 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1399 {
1400   PetscErrorCode ierr;
1401 
1402   PetscFunctionBegin;
1403   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1404   PetscValidType(A,1);
1405   ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
1406   PetscFunctionReturn(0);
1407 }
1408 
1409 /*
1410    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1411 
1412    Collective
1413 
1414    Input Parameters:
1415 +  parcsr   - the pointer to the hypre_ParCSRMatrix
1416 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1417 -  copymode - PETSc copying options
1418 
1419    Output Parameter:
1420 .  A  - the matrix
1421 
1422    Level: intermediate
1423 
1424 .seealso: MatHYPRE, PetscCopyMode
1425 */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  /* all AIJ flavors share the same conversion path below */
  isaij  = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns: hypre reports last < first in that case */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: half-open ranges [rstart,rend), whereas hypre's first/last are inclusive */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix (hypre again expects inclusive end indices) */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: clone the pattern only (second arg 0),
     copy the values by hand, then behave as if the caller had handed over
     ownership of the clone */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object directly into the IJ matrix, bypassing HYPRE_IJMatrixSetValues */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag: the ParCSR object handed in is already fully built */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      /* point hypre at the PETSc layout ranges; parcsr does not own them so hypre never frees them */
      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    /* prevent from freeing the pointer: with PETSC_USE_POINTER the caller keeps ownership */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer while the intermediate MATHYPRE is converted and destroyed */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    /* only free parcsr with the intermediate MATHYPRE if we own it */
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1539 
1540 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1541 {
1542   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1543   HYPRE_Int type;
1544 
1545   PetscFunctionBegin;
1546   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1547   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1548   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1549   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1550   PetscFunctionReturn(0);
1551 }
1552 
1553 /*
1554    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1555 
1556    Not collective
1557 
   Input Parameter:
.  A  - the MATHYPRE object
1560 
1561    Output Parameter:
1562 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1563 
1564    Level: intermediate
1565 
1566 .seealso: MatHYPRE, PetscCopyMode
1567 */
/* User-callable entry point: dispatches to the implementation composed on A
   as "MatHYPREGetParCSR_C" (only MATHYPRE provides one). */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if the method is absent */
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1578 
1579 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1580 {
1581   hypre_ParCSRMatrix *parcsr;
1582   hypre_CSRMatrix    *ha;
1583   PetscInt           rst;
1584   PetscErrorCode     ierr;
1585 
1586   PetscFunctionBegin;
1587   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1588   ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
1589   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1590   if (missing) *missing = PETSC_FALSE;
1591   if (dd) *dd = -1;
1592   ha = hypre_ParCSRMatrixDiag(parcsr);
1593   if (ha) {
1594     PetscInt  size,i;
1595     HYPRE_Int *ii,*jj;
1596 
1597     size = hypre_CSRMatrixNumRows(ha);
1598     ii   = hypre_CSRMatrixI(ha);
1599     jj   = hypre_CSRMatrixJ(ha);
1600     for (i = 0; i < size; i++) {
1601       PetscInt  j;
1602       PetscBool found = PETSC_FALSE;
1603 
1604       for (j = ii[i]; j < ii[i+1] && !found; j++)
1605         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1606 
1607       if (!found) {
1608         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1609         if (missing) *missing = PETSC_TRUE;
1610         if (dd) *dd = i+rst;
1611         PetscFunctionReturn(0);
1612       }
1613     }
1614     if (!size) {
1615       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1616       if (missing) *missing = PETSC_TRUE;
1617       if (dd) *dd = rst;
1618     }
1619   } else {
1620     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1621     if (missing) *missing = PETSC_TRUE;
1622     if (dd) *dd = rst;
1623   }
1624   PetscFunctionReturn(0);
1625 }
1626 
1627 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1628 {
1629   hypre_ParCSRMatrix *parcsr;
1630   hypre_CSRMatrix    *ha;
1631   PetscErrorCode     ierr;
1632   HYPRE_Complex      hs;
1633 
1634   PetscFunctionBegin;
1635   ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr);
1636   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1637   /* diagonal part */
1638   ha = hypre_ParCSRMatrixDiag(parcsr);
1639   if (ha) {
1640     PetscInt      size,i;
1641     HYPRE_Int     *ii;
1642     HYPRE_Complex *a;
1643 
1644     size = hypre_CSRMatrixNumRows(ha);
1645     a    = hypre_CSRMatrixData(ha);
1646     ii   = hypre_CSRMatrixI(ha);
1647     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1648   }
1649   /* offdiagonal part */
1650   ha = hypre_ParCSRMatrixOffd(parcsr);
1651   if (ha) {
1652     PetscInt      size,i;
1653     HYPRE_Int     *ii;
1654     HYPRE_Complex *a;
1655 
1656     size = hypre_CSRMatrixNumRows(ha);
1657     a    = hypre_CSRMatrixData(ha);
1658     ii   = hypre_CSRMatrixI(ha);
1659     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1660   }
1661   PetscFunctionReturn(0);
1662 }
1663 
/* Eliminates the given rows and the matching columns through
   hypre_ParCSRMatrixEliminateRowsCols. Modifying a solution/rhs (x,b) is not
   implemented, and all row indices must be locally owned.
   NOTE(review): the 'diag' argument is never used in this routine, so the
   requested diagonal value is not explicitly placed on eliminated rows —
   confirm hypre's elimination produces the intended diagonal. */
static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          *lrows;
  PetscInt           rst,ren,i;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
  /* translate global row indices to the local indices hypre expects */
  for (i=0;i<numRows;i++) {
    if (rows[i] < rst || rows[i] >= ren)
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
    lrows[i] = rows[i] - rst;
  }
  PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1685 
1686 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1687 {
1688   PetscErrorCode      ierr;
1689 
1690   PetscFunctionBegin;
1691   if (ha) {
1692     HYPRE_Int     *ii, size;
1693     HYPRE_Complex *a;
1694 
1695     size = hypre_CSRMatrixNumRows(ha);
1696     a    = hypre_CSRMatrixData(ha);
1697     ii   = hypre_CSRMatrixI(ha);
1698 
1699     if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);}
1700   }
1701   PetscFunctionReturn(0);
1702 }
1703 
/* Sets every stored value of the matrix to zero, preserving the nonzero pattern. */
PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  hypre_ParCSRMatrix  *parcsr;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
  /* off-diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1717 
1718 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
1719 {
1720   PetscInt        ii;
1721   HYPRE_Int       *i, *j;
1722   HYPRE_Complex   *a;
1723 
1724   PetscFunctionBegin;
1725   if (!hA) PetscFunctionReturn(0);
1726 
1727   i = hypre_CSRMatrixI(hA);
1728   j = hypre_CSRMatrixJ(hA);
1729   a = hypre_CSRMatrixData(hA);
1730 
1731   for (ii = 0; ii < N; ii++) {
1732     HYPRE_Int jj, ibeg, iend, irow;
1733 
1734     irow = rows[ii];
1735     ibeg = i[irow];
1736     iend = i[irow+1];
1737     for (jj = ibeg; jj < iend; jj++)
1738       if (j[jj] == irow) a[jj] = diag;
1739       else a[jj] = 0.0;
1740    }
1741    PetscFunctionReturn(0);
1742 }
1743 
/* Zeroes the listed global rows (columns untouched), placing 'diag' on the
   diagonal. Updating a solution/rhs (x,b) is not supported. */
static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  hypre_ParCSRMatrix  *parcsr;
  PetscInt            *lrows,len;
  HYPRE_Complex       hdiag;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
  ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr);
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
  /* zero diagonal part: the diagonal entry lives in this block, so pass hdiag */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr);
  /* zero off-diagonal part: no diagonal entry can live here, so pass 0 */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);

  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1766 
/* Begins assembly: starts scattering stashed off-process entries (queued by
   MatSetValues) to their owning ranks; a no-op if the user guaranteed there
   are none via MAT_NO_OFF_PROC_ENTRIES. */
static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1777 
/* Exposes one global row via hypre's GetRow; the index/value arrays returned
   through idx/v are owned by hypre and must be released with
   MatRestoreRow_HYPRE. */
static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  HYPRE_Int           hnz;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API; the casts assume PetscInt/HYPRE_BigInt and
     PetscScalar/HYPRE_Complex are layout-compatible — presumably enforced at
     configure time, TODO confirm */
  PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  if (nz) *nz = (PetscInt)hnz;
  PetscFunctionReturn(0);
}
1792 
/* Releases the row data handed out by MatGetRow_HYPRE back to hypre. */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  HYPRE_Int           hnz;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API; hypre wants back the entry count it handed out */
  hnz = nz ? (HYPRE_Int)(*nz) : 0;
  PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  PetscFunctionReturn(0);
}
1807 
1808 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1809 {
1810   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1811   PetscInt  i;
1812 
1813   PetscFunctionBegin;
1814   if (!m || !n) PetscFunctionReturn(0);
1815   /* Ignore negative row indices
1816    * And negative column indices should be automatically ignored in hypre
1817    * */
1818   for (i=0; i<m; i++) {
1819     if (idxm[i] >= 0) {
1820       HYPRE_Int hn = (HYPRE_Int)n;
1821       PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
1822     }
1823   }
1824   PetscFunctionReturn(0);
1825 }
1826 
1827 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1828 {
1829   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1830 
1831   PetscFunctionBegin;
1832   switch (op) {
1833   case MAT_NO_OFF_PROC_ENTRIES:
1834     if (flg) {
1835       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1836     }
1837     break;
1838   default:
1839     break;
1840   }
1841   PetscFunctionReturn(0);
1842 }
1843 
1844 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1845 {
1846   hypre_ParCSRMatrix *parcsr;
1847   PetscErrorCode     ierr;
1848   Mat                B;
1849   PetscViewerFormat  format;
1850   PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;
1851 
1852   PetscFunctionBegin;
1853   ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
1854   if (format != PETSC_VIEWER_NATIVE) {
1855     ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1856     ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
1857     ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr);
1858     if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr);
1859     ierr = (*mview)(B,view);CHKERRQ(ierr);
1860     ierr = MatDestroy(&B);CHKERRQ(ierr);
1861   } else {
1862     Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
1863     PetscMPIInt size;
1864     PetscBool   isascii;
1865     const char *filename;
1866 
1867     /* HYPRE uses only text files */
1868     ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
1869     if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1870     ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr);
1871     PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
1872     ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr);
1873     if (size > 1) {
1874       ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr);
1875     } else {
1876       ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr);
1877     }
1878   }
1879   PetscFunctionReturn(0);
1880 }
1881 
1882 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
1883 {
1884   hypre_ParCSRMatrix *parcsr;
1885   PetscErrorCode     ierr;
1886   PetscCopyMode      cpmode;
1887 
1888   PetscFunctionBegin;
1889   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1890   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
1891     parcsr = hypre_ParCSRMatrixClone(parcsr,0);
1892     cpmode = PETSC_OWN_POINTER;
1893   } else {
1894     cpmode = PETSC_COPY_VALUES;
1895   }
1896   ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr);
1897   PetscFunctionReturn(0);
1898 }
1899 
/* Copies A into B. When both are MATHYPRE with the same nonzero pattern the
   values are copied directly at the ParCSR level; otherwise falls back to the
   generic entry-by-entry copy. */
static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
{
  hypre_ParCSRMatrix *acsr,*bcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* comparing the copy methods is a cheap check that both matrices are MATHYPRE */
  if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
    ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr);
    ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr);
    /* third argument presumably requests copying the values — confirm against hypre_ParCSRMatrixCopy(A,B,copy_data) */
    PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
    ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1917 
/* Extracts the diagonal of A into the vector d. Requires congruent row and
   column layouts; in debug builds also requires every diagonal entry to be
   present in the nonzero pattern. */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  HYPRE_Complex      *data = NULL;
  HYPRE_Int          *diag = NULL;
  PetscInt           i;
  PetscBool          cong;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
  if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
#if defined(PETSC_USE_DEBUG)
  {
    PetscBool miss;
    ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
#endif
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
    ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
    diag = hypre_CSRMatrixI(dmat);
    data = hypre_CSRMatrixData(dmat);
    /* data[diag[i]] is the first stored entry of row i — relies on hypre's
       convention of storing the diagonal entry first in the diag block;
       TODO confirm this holds for all code paths producing the matrix */
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1951 
1952 #include <petscblaslapack.h>
1953 
/* Computes Y += a*X. With SAME_NONZERO_PATTERN the update is a pair of BLAS
   axpy calls on the raw hypre value arrays (diag and offd blocks must have
   identical nonzero counts); with SUBSET_NONZERO_PATTERN the generic
   entry-wise path is used; otherwise Y is rebuilt with a merged pattern. */
static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    hypre_ParCSRMatrix *x,*y;
    hypre_CSRMatrix    *xloc,*yloc;
    PetscInt           xnnz,ynnz;
    HYPRE_Complex      *xarr,*yarr;
    PetscBLASInt       one=1,bnz;

    ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
    ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);

    /* diagonal block */
    xloc = hypre_ParCSRMatrixDiag(x);
    yloc = hypre_ParCSRMatrixDiag(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
    ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
    /* the HYPRE_Complex* -> PetscScalar* casts assume the two types are layout compatible — TODO confirm configure guarantees this */
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));

    /* off-diagonal block */
    xloc = hypre_ParCSRMatrixOffd(x);
    yloc = hypre_ParCSRMatrixOffd(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
    ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
  } else if (str == SUBSET_NONZERO_PATTERN) {
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  } else {
    Mat B;

    /* preallocate a matrix B with the union pattern, accumulate into it, then swap it into Y's header */
    ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr);
    ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
    ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2017 
2018 /*MC
2019    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2020           based on the hypre IJ interface.
2021 
2022    Level: intermediate
2023 
2024 .seealso: MatCreate()
2025 M*/
2026 
/* Constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
   matrix operations table, and composes the type-specific methods and
   conversions used elsewhere (MatConvert, MatPtAP, preallocation, ParCSR
   access). */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;  /* by default the wrapped hypre objects are destroyed with the Mat */
  hB->available  = PETSC_TRUE;
  hB->size       = 0;
  hB->array      = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  /* start from an all-NULL operations table and fill in what MATHYPRE supports */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult             = MatMult_HYPRE;
  B->ops->multtranspose    = MatMultTranspose_HYPRE;
  B->ops->multadd          = MatMultAdd_HYPRE;
  B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
  B->ops->setup            = MatSetUp_HYPRE;
  B->ops->destroy          = MatDestroy_HYPRE;
  B->ops->assemblyend      = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin    = MatAssemblyBegin_HYPRE;
  B->ops->ptap             = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult          = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues        = MatSetValues_HYPRE;
  B->ops->missingdiagonal  = MatMissingDiagonal_HYPRE;
  B->ops->scale            = MatScale_HYPRE;
  B->ops->zerorowscolumns  = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries      = MatZeroEntries_HYPRE;
  B->ops->zerorows         = MatZeroRows_HYPRE;
  B->ops->getrow           = MatGetRow_HYPRE;
  B->ops->restorerow       = MatRestoreRow_HYPRE;
  B->ops->getvalues        = MatGetValues_HYPRE;
  B->ops->setoption        = MatSetOption_HYPRE;
  B->ops->duplicate        = MatDuplicate_HYPRE;
  B->ops->copy             = MatCopy_HYPRE;
  B->ops->view             = MatView_HYPRE;
  B->ops->getdiagonal      = MatGetDiagonal_HYPRE;
  B->ops->axpy             = MatAXPY_HYPRE;

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* hypre keeps its own communicator; dup it so hypre collectives cannot interfere with PETSc's */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaijmkl_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2085 
2086 static PetscErrorCode hypre_array_destroy(void *ptr)
2087 {
2088    PetscFunctionBegin;
2089    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
2090    PetscFunctionReturn(0);
2091 }
2092