xref: /petsc/src/mat/impls/hypre/mhypre.c (revision bfcb38ea38335faa6e7f8d97f6bc6ce9aa2a1dd1)
1 
2 /*
3     Creates hypre ijmatrix from PETSc matrix
4 */
5 
6 #include <petscmathypre.h>
7 #include <petsc/private/matimpl.h>
8 #include <../src/mat/impls/hypre/mhypre.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <../src/vec/vec/impls/hypre/vhyp.h>
11 #include <HYPRE.h>
12 #include <HYPRE_utilities.h>
13 #include <_hypre_parcsr_ls.h>
14 #include <_hypre_sstruct_ls.h>
15 
16 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
17 
18 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
19 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
20 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
21 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
22 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,PetscScalar,Vec,PetscScalar,Vec,PetscBool);
23 static PetscErrorCode hypre_array_destroy(void*);
24 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
25 
26 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
27 {
28   PetscErrorCode ierr;
29   PetscInt       i,n_d,n_o;
30   const PetscInt *ia_d,*ia_o;
31   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
32   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
33 
34   PetscFunctionBegin;
35   if (A_d) { /* determine number of nonzero entries in local diagonal part */
36     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
37     if (done_d) {
38       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
39       for (i=0; i<n_d; i++) {
40         nnz_d[i] = ia_d[i+1] - ia_d[i];
41       }
42     }
43     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
44   }
45   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
46     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
47     if (done_o) {
48       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
49       for (i=0; i<n_o; i++) {
50         nnz_o[i] = ia_o[i+1] - ia_o[i];
51       }
52     }
53     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
54   }
55   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
56     if (!done_o) { /* only diagonal part */
57       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
58       for (i=0; i<n_d; i++) {
59         nnz_o[i] = 0;
60       }
61     }
62     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
63     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
64     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
65   }
66   PetscFunctionReturn(0);
67 }
68 
69 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
70 {
71   PetscErrorCode ierr;
72   PetscInt       rstart,rend,cstart,cend;
73 
74   PetscFunctionBegin;
75   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
76   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
77   rstart = A->rmap->rstart;
78   rend   = A->rmap->rend;
79   cstart = A->cmap->rstart;
80   cend   = A->cmap->rend;
81   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
82   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
83   {
84     PetscBool      same;
85     Mat            A_d,A_o;
86     const PetscInt *colmap;
87     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
88     if (same) {
89       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
90       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
91       PetscFunctionReturn(0);
92     }
93     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
94     if (same) {
95       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
96       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
97       PetscFunctionReturn(0);
98     }
99     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
100     if (same) {
101       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
102       PetscFunctionReturn(0);
103     }
104     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
105     if (same) {
106       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
107       PetscFunctionReturn(0);
108     }
109   }
110   PetscFunctionReturn(0);
111 }
112 
113 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
114 {
115   PetscErrorCode    ierr;
116   PetscInt          i,rstart,rend,ncols,nr,nc;
117   const PetscScalar *values;
118   const PetscInt    *cols;
119   PetscBool         flg;
120 
121   PetscFunctionBegin;
122   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
123   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
124   if (flg && nr == nc) {
125     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
126     PetscFunctionReturn(0);
127   }
128   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
129   if (flg) {
130     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
131     PetscFunctionReturn(0);
132   }
133 
134   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
135   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
136   for (i=rstart; i<rend; i++) {
137     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
138     if (ncols) {
139       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
140     }
141     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
142   }
143   PetscFunctionReturn(0);
144 }
145 
/*
   Fast value copy for SeqAIJ: memcpy the CSR arrays of A straight into the
   diagonal block of the hypre ParCSR matrix behind ij, bypassing
   HYPRE_IJMatrixSetValues entirely.  Assumes ij was preallocated from A, so
   the hypre arrays have the same sizes/sparsity as A's
   (assumption inherited from MatHYPRE_CreateFromMat — confirm for new callers).
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
       (copies assume sizeof(HYPRE_Int) == sizeof(PetscInt) and
        sizeof(HYPRE_Complex) == sizeof(PetscScalar))
  */
  ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  /* tell hypre the translator is not needed: the CSR data is already final */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
172 
173 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
174 {
175   PetscErrorCode        ierr;
176   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
177   Mat_SeqAIJ            *pdiag,*poffd;
178   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
179   HYPRE_Int             type;
180   hypre_ParCSRMatrix    *par_matrix;
181   hypre_AuxParCSRMatrix *aux_matrix;
182   hypre_CSRMatrix       *hdiag,*hoffd;
183 
184   PetscFunctionBegin;
185   pdiag = (Mat_SeqAIJ*) pA->A->data;
186   poffd = (Mat_SeqAIJ*) pA->B->data;
187   /* cstart is only valid for square MPIAIJ layed out in the usual way */
188   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
189 
190   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
191   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
192   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
193   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
194   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
195   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
196 
197   /*
198        this is the Hack part where we monkey directly with the hypre datastructures
199   */
200   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
201   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
202   jj  = (PetscInt*)hdiag->j;
203   pjj = (PetscInt*)pdiag->j;
204   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
205   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
206   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
207   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
208      If we hacked a hypre a bit more we might be able to avoid this step */
209   jj  = (PetscInt*) hoffd->j;
210   pjj = (PetscInt*) poffd->j;
211   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
212   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
213 
214   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
215   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
216   PetscFunctionReturn(0);
217 }
218 
/*
   Converts a MATHYPRE matrix A into a MATIS matrix B.
   The local MATIS matrix is the row-wise merge of the hypre diag and offd
   CSR blocks, with offd columns appended after the diag columns in the local
   numbering; the column l2g map is [cstart..cstart+dc) followed by
   col_map_offd.  Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and
   MAT_INPLACE_MATRIX (the latter via MatHeaderReplace).
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;              /* local (sequential) matrix of the MATIS */
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* global sizes, first local row/col, and the two local CSR blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);  /* nnz = total local nonzeros (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;                /* local diag columns -> global */
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];   /* offd columns appended after diag */
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse path: write directly into the existing local matrix's CSR arrays */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: interleave each row's diag and offd entries
     (offd columns shifted by dc into the local numbering), then sort the
     row's column indices together with its values */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* remaining (empty-tail) rows: close out the row pointer array */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the arrays we allocated above */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
324 
325 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
326 {
327   Mat            M = NULL;
328   Mat_HYPRE      *hB;
329   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
330   PetscErrorCode ierr;
331 
332   PetscFunctionBegin;
333   if (reuse == MAT_REUSE_MATRIX) {
334     /* always destroy the old matrix and create a new memory;
335        hope this does not churn the memory too much. The problem
336        is I do not know if it is possible to put the matrix back to
337        its initial state so that we can directly copy the values
338        the second time through. */
339     hB = (Mat_HYPRE*)((*B)->data);
340     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
341   } else {
342     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
343     ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr);
344     ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
345     hB   = (Mat_HYPRE*)(M->data);
346     if (reuse == MAT_INITIAL_MATRIX) *B = M;
347   }
348   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
349   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
350   if (reuse == MAT_INPLACE_MATRIX) {
351     ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr);
352   }
353   (*B)->preallocated = PETSC_TRUE;
354   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
355   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
356   PetscFunctionReturn(0);
357 }
358 
/*
   Converts a MATHYPRE matrix A to (Seq/MPI)AIJ.
   - MAT_INITIAL_MATRIX: allocates fresh CSR arrays and gives their ownership
     to the newly created AIJ matrix.
   - MAT_REUSE_MATRIX: writes into the CSR arrays of the existing *B (must be
     MPIAIJ/SeqAIJ with compatible sizes).
   - MAT_INPLACE_MATRIX: steals hypre's own arrays, NULLs them inside the
     hypre CSR structs, and registers containers so they are later released
     with hypre_TFree (see hypre_array_destroy).
   In all cases the column indices of each row are sorted, and offd column
   indices are mapped back to global numbering via col_map_offd.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the destination diag arrays: freshly allocated, borrowed from *B,
     or hypre's own (inplace) */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    /* alias hypre's arrays; the memcpys below are then self-copies (no-ops) */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* AIJ requires sorted column indices within each row */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* offd columns are stored compressed in hypre: expand them to global indices */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the arrays allocated above */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the arrays from hypre so HYPRE_IJMatrixDestroy (via the header
         replace below) does not free memory now owned by T */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the arrays allocated above */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays: attach destroy containers */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
527 
528 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
529 {
530   hypre_ParCSRMatrix *tA;
531   hypre_CSRMatrix    *hdiag,*hoffd;
532   Mat_SeqAIJ         *diag,*offd;
533   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
534   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
535   PetscBool          ismpiaij,isseqaij;
536   PetscErrorCode     ierr;
537 
538   PetscFunctionBegin;
539   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
540   ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
541   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
542   if (ismpiaij) {
543     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
544 
545     diag   = (Mat_SeqAIJ*)a->A->data;
546     offd   = (Mat_SeqAIJ*)a->B->data;
547     garray = a->garray;
548     noffd  = a->B->cmap->N;
549     dnnz   = diag->nz;
550     onnz   = offd->nz;
551   } else {
552     diag   = (Mat_SeqAIJ*)A->data;
553     offd   = NULL;
554     garray = NULL;
555     noffd  = 0;
556     dnnz   = diag->nz;
557     onnz   = 0;
558   }
559 
560   /* create a temporary ParCSR */
561   if (HYPRE_AssumedPartitionCheck()) {
562     PetscMPIInt myid;
563 
564     ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
565     row_starts = A->rmap->range + myid;
566     col_starts = A->cmap->range + myid;
567   } else {
568     row_starts = A->rmap->range;
569     col_starts = A->cmap->range;
570   }
571   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
572   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
573   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
574 
575   /* set diagonal part */
576   hdiag = hypre_ParCSRMatrixDiag(tA);
577   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
578   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
579   hypre_CSRMatrixData(hdiag)        = diag->a;
580   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
581   hypre_CSRMatrixSetRownnz(hdiag);
582   hypre_CSRMatrixSetDataOwner(hdiag,0);
583 
584   /* set offdiagonal part */
585   hoffd = hypre_ParCSRMatrixOffd(tA);
586   if (offd) {
587     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
588     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
589     hypre_CSRMatrixData(hoffd)        = offd->a;
590     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
591     hypre_CSRMatrixSetRownnz(hoffd);
592     hypre_CSRMatrixSetDataOwner(hoffd,0);
593     hypre_ParCSRMatrixSetNumNonzeros(tA);
594     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
595   }
596   *hA = tA;
597   PetscFunctionReturn(0);
598 }
599 
600 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
601 {
602   hypre_CSRMatrix    *hdiag,*hoffd;
603 
604   PetscFunctionBegin;
605   hdiag = hypre_ParCSRMatrixDiag(*hA);
606   hoffd = hypre_ParCSRMatrixOffd(*hA);
607   /* set pointers to NULL before destroying tA */
608   hypre_CSRMatrixI(hdiag)           = NULL;
609   hypre_CSRMatrixJ(hdiag)           = NULL;
610   hypre_CSRMatrixData(hdiag)        = NULL;
611   hypre_CSRMatrixI(hoffd)           = NULL;
612   hypre_CSRMatrixJ(hoffd)           = NULL;
613   hypre_CSRMatrixData(hoffd)        = NULL;
614   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
615   hypre_ParCSRMatrixDestroy(*hA);
616   *hA = NULL;
617   PetscFunctionReturn(0);
618 }
619 
/* calls RAP from BoomerAMG:
   computes *hRAP = hR^T * hA * hP (hR is applied transposed by the kernel).
   The resulting ParCSR will not own the column and row starts.
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember ownership before the call so it can be restored afterwards */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  /* give the stolen ownership back to P and R when they originally had it */
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}
641 
642 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
643 {
644   Mat                B;
645   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
646   PetscErrorCode     ierr;
647 
648   PetscFunctionBegin;
649   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
650   ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
651   ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
652   ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
653   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
654   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
655   ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
656   PetscFunctionReturn(0);
657 }
658 
659 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
660 {
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
665   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
666   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
667   PetscFunctionReturn(0);
668 }
669 
670 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
671 {
672   Mat                B;
673   Mat_HYPRE          *hP;
674   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
675   HYPRE_Int          type;
676   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
677   PetscBool          ishypre;
678   PetscErrorCode     ierr;
679 
680   PetscFunctionBegin;
681   ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
682   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
683   hP = (Mat_HYPRE*)P->data;
684   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
685   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
686   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
687 
688   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
689   ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
690   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
691 
692   /* create temporary matrix and merge to C */
693   ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
694   ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
695   PetscFunctionReturn(0);
696 }
697 
698 static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
699 {
700   PetscErrorCode ierr;
701 
702   PetscFunctionBegin;
703   if (scall == MAT_INITIAL_MATRIX) {
704     const char *deft = MATAIJ;
705     char       type[256];
706     PetscBool  flg;
707 
708     ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
709     ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
710     ierr = PetscOptionsEnd();CHKERRQ(ierr);
711     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
712     ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
713     if (flg) {
714       ierr = MatSetType(*C,type);CHKERRQ(ierr);
715     } else {
716       ierr = MatSetType(*C,deft);CHKERRQ(ierr);
717     }
718     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
719     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
720   }
721   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
722   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
723   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
724   PetscFunctionReturn(0);
725 }
726 
/* Numeric phase of C = P^T*A*P when both A and P are MATHYPRE; the result replaces
   the content of C via MatHeaderMerge so the caller's handle stays valid */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* both operands must be MATHYPRE and must store a HYPRE_PARCSR object */
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  /* compute the triple product with hypre, wrap it in a temporary MATHYPRE, and move it into C */
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
754 
755 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
756 {
757   PetscErrorCode ierr;
758 
759   PetscFunctionBegin;
760   if (scall == MAT_INITIAL_MATRIX) {
761     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
762     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
763     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
764     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
765     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
766   }
767   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
768   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
769   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
/* Computes hAB = hA*hB; the caller becomes the owner of the returned ParCSR matrix */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* push/pop so failures inside hypre are attributed to hypre_ParMatmul in the PETSc stack */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}
787 
/* Numeric phase of C = A*B for AIJ matrices, computed by viewing them as hypre ParCSR
   (presumably a no-copy view -- see MatAIJGetParCSR_Private elsewhere in this file) */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  /* wrap the hypre product in an AIJ matrix and move its content into C */
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
804 
805 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
806 {
807   PetscErrorCode ierr;
808 
809   PetscFunctionBegin;
810   ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
811   ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
812   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
813   PetscFunctionReturn(0);
814 }
815 
/* Numeric phase of C = A*B when both A and B are MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* both operands must be MATHYPRE and must store a HYPRE_PARCSR object */
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  /* HeaderReplace wiped C's ops table, so reinstall the numeric function for reuse */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}
845 
846 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
847 {
848   PetscErrorCode ierr;
849 
850   PetscFunctionBegin;
851   if (scall == MAT_INITIAL_MATRIX) {
852     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
853     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
854     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
855     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
856     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
857   }
858   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
859   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
860   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
861   PetscFunctionReturn(0);
862 }
863 
/* Numeric phase of D = A^T*B*C for AIJ matrices, computed via hypre's RAP kernel
   (same kernel used for PtAP above, with three distinct operands) */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* view the AIJ data as hypre ParCSR objects */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  /* wrap the result in an AIJ matrix and move its content into D */
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
882 
883 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
884 {
885   PetscErrorCode ierr;
886 
887   PetscFunctionBegin;
888   ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
889   ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 
/* y = A^T * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
901 
/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
910 
/* z = A * x + y */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) { /* bring y into z so the kernel can accumulate in place (b = 1) */
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
922 
/* z = A^T * x + y */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) { /* bring y into z so the kernel can accumulate in place (b = 1) */
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
934 
/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, PetscScalar a, Vec x, PetscScalar b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* hA->x / hA->b are cached hypre work vectors (created in MatAssemblyEnd_HYPRE);
     hx/hy are their underlying ParVector objects */
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* for the transpose product hA->x holds the output array and hA->b the input;
       the macro swaps in the PETSc arrays, saving hypre's own pointers in sax/say */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    /* restore hypre's original pointers (reverse of the swap above) */
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    /* forward product: hA->x holds the input array, hA->b the output */
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
967 
/* Frees all MATHYPRE resources: hypre work vectors, the IJ matrix, the duplicated
   communicator, the stash, the scratch array, and the composed methods */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if we do not own the inner ParCSR object, detach it so hypre will not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  /* scratch space used by MatGetArray_HYPRE/MatSetValues_HYPRE */
  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove the composed methods installed at creation/conversion time */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
996 
/* Default setup: preallocate with default row lengths (see MatHYPRESetPreallocation) */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1005 
/* Completes assembly: flushes stashed off-process entries into the IJ matrix,
   calls hypre's assemble, rebuilds hypre's auxiliary matrix so further
   MatSetValues calls keep working, and lazily creates the work vectors used by
   the matrix-vector kernels */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  Vec                x,b;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* drain the stash: each message carries (row,col,val) triples destined to this rank */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* lazily create the hypre work vectors used by MatHYPRE_MultKernel_Private */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  /* the Vecs carry no data (NULL array); only their layouts are used to size the IJ vectors */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1067 
1068 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1069 {
1070   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1071   PetscErrorCode     ierr;
1072 
1073   PetscFunctionBegin;
1074   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1075 
1076   if (hA->size >= size) *array = hA->array;
1077   else {
1078     ierr = PetscFree(hA->array);CHKERRQ(ierr);
1079     hA->size = size;
1080     ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
1081     *array = hA->array;
1082   }
1083 
1084   hA->available = PETSC_FALSE;
1085   PetscFunctionReturn(0);
1086 }
1087 
1088 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1089 {
1090   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1091 
1092   PetscFunctionBegin;
1093   *array = NULL;
1094   hA->available = PETSC_TRUE;
1095   PetscFunctionReturn(0);
1096 }
1097 
/* Inserts or adds a logically dense nr-by-nc block of values into the IJ matrix.
   Negative row/column indices are skipped (PETSc convention); with INSERT_VALUES,
   rows owned by other ranks are routed through the stash and flushed at assembly.
   NOTE(review): the (HYPRE_Int*) casts below assume sizeof(HYPRE_Int) ==
   sizeof(PetscInt) -- verify against the hypre build configuration. */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscScalar        *vals = (PetscScalar *)v;
  PetscScalar        *sscr;
  PetscInt           *cscr[2];
  PetscInt           i,nzc;
  void               *array = NULL;
  PetscFunctionBegin;
  /* scratch layout: [compressed cols | original col positions | compressed values] */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(PetscScalar)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (PetscScalar*)(((PetscInt*)array)+nc*2);
  /* compress out negative column indices, remembering each kept column's position */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        /* gather this row's kept values into contiguous scratch before calling hypre */
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */

    PetscInt rst,ren;
    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);

    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1154 
1155 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1156 {
1157   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
1158   HYPRE_Int      *hdnnz,*honnz;
1159   PetscInt       i,rs,re,cs,ce,bs;
1160   PetscMPIInt    size;
1161   PetscErrorCode ierr;
1162 
1163   PetscFunctionBegin;
1164   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1165   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1166   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1167   rs   = A->rmap->rstart;
1168   re   = A->rmap->rend;
1169   cs   = A->cmap->rstart;
1170   ce   = A->cmap->rend;
1171   if (!hA->ij) {
1172     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1173     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1174   } else {
1175     HYPRE_Int hrs,hre,hcs,hce;
1176     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1177     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
1178     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
1179   }
1180   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1181   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1182 
1183   if (!dnnz) {
1184     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
1185     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1186   } else {
1187     hdnnz = (HYPRE_Int*)dnnz;
1188   }
1189   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1190   if (size > 1) {
1191     hypre_AuxParCSRMatrix *aux_matrix;
1192     if (!onnz) {
1193       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
1194       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1195     } else {
1196       honnz = (HYPRE_Int*)onnz;
1197     }
1198     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1199        they assume the user will input the entire row values, properly sorted
1200        In PETSc, we don't make such an assumption, and we instead set this flag to 1
1201        Also, to avoid possible memory leaks, we destroy and recreate the translator
1202        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1203        the IJ matrix for us */
1204     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1205     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1206     hypre_IJMatrixTranslator(hA->ij) = NULL;
1207     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1208     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1209     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1210   } else {
1211     honnz = NULL;
1212     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1213   }
1214 
1215   /* reset assembled flag and call the initialize method */
1216   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1217   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1218   if (!dnnz) {
1219     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
1220   }
1221   if (!onnz && honnz) {
1222     ierr = PetscFree(honnz);CHKERRQ(ierr);
1223   }
1224 
1225   /* Match AIJ logic */
1226   A->preallocated = PETSC_TRUE;
1227   A->assembled    = PETSC_FALSE;
1228   PetscFunctionReturn(0);
1229 }
1230 
/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   Notes:
    If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the type-specific implementation if the type provides one */
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1271 
1272 /*
1273    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1274 
1275    Collective
1276 
1277    Input Parameters:
1278 +  parcsr   - the pointer to the hypre_ParCSRMatrix
1279 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1280 -  copymode - PETSc copying options
1281 
1282    Output Parameter:
1283 .  A  - the matrix
1284 
1285    Level: intermediate
1286 
1287 .seealso: MatHYPRE, PetscCopyMode
1288 */
/* Wraps an existing hypre_ParCSRMatrix in a PETSc Mat of the requested type.
   copymode controls ownership of parcsr: PETSC_OWN_POINTER transfers it to the
   new Mat, PETSC_USE_POINTER borrows it, PETSC_COPY_VALUES (MATHYPRE only)
   clones it first. */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    /* clone the structure, then copy the numerical values of both blocks */
    new_parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag)*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr       = PetscMemcpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd)*sizeof(PetscScalar));CHKERRQ(ierr);
    /* from here on we own the clone, so proceed as PETSC_OWN_POINTER */
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    /* borrow the starts arrays from the PETSc layouts when hypre does not own them */
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1401 
/* Type-specific implementation of MatHYPREGetParCSR: returns the ParCSR object
   stored inside the IJ matrix (no copy; the Mat retains ownership) */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}
1414 
1415 /*
1416    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1417 
1418    Not collective
1419 
   Input Parameter:
.  A  - the MATHYPRE object
1422 
1423    Output Parameter:
1424 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1425 
1426    Level: intermediate
1427 
1428 .seealso: MatHYPRE, PetscCopyMode
1429 */
/* Public entry point; dispatches to the type-specific implementation and errors
   out if the type does not provide one (PetscUseMethod) */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1440 
1441 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1442 {
1443   hypre_ParCSRMatrix *parcsr;
1444   hypre_CSRMatrix    *ha;
1445   PetscInt           rst;
1446   PetscErrorCode     ierr;
1447 
1448   PetscFunctionBegin;
1449   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1450   ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
1451   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1452   if (missing) *missing = PETSC_FALSE;
1453   if (dd) *dd = -1;
1454   ha = hypre_ParCSRMatrixDiag(parcsr);
1455   if (ha) {
1456     PetscInt  size,i;
1457     HYPRE_Int *ii,*jj;
1458 
1459     size = hypre_CSRMatrixNumRows(ha);
1460     ii   = hypre_CSRMatrixI(ha);
1461     jj   = hypre_CSRMatrixJ(ha);
1462     for (i = 0; i < size; i++) {
1463       PetscInt  j;
1464       PetscBool found = PETSC_FALSE;
1465 
1466       for (j = ii[i]; j < ii[i+1] && !found; j++)
1467         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1468 
1469       if (!found) {
1470         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1471         if (missing) *missing = PETSC_TRUE;
1472         if (dd) *dd = i+rst;
1473         PetscFunctionReturn(0);
1474       }
1475     }
1476     if (!size) {
1477       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1478       if (missing) *missing = PETSC_TRUE;
1479       if (dd) *dd = rst;
1480     }
1481   } else {
1482     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1483     if (missing) *missing = PETSC_TRUE;
1484     if (dd) *dd = rst;
1485   }
1486   PetscFunctionReturn(0);
1487 }
1488 
1489 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1490 {
1491   hypre_ParCSRMatrix *parcsr;
1492   hypre_CSRMatrix    *ha;
1493   PetscErrorCode     ierr;
1494 
1495   PetscFunctionBegin;
1496   ierr  = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1497   /* diagonal part */
1498   ha = hypre_ParCSRMatrixDiag(parcsr);
1499   if (ha) {
1500     PetscInt    size,i;
1501     HYPRE_Int   *ii;
1502     PetscScalar *a;
1503 
1504     size = hypre_CSRMatrixNumRows(ha);
1505     a    = hypre_CSRMatrixData(ha);
1506     ii   = hypre_CSRMatrixI(ha);
1507     for (i = 0; i < ii[size]; i++) a[i] *= s;
1508   }
1509   /* offdiagonal part */
1510   ha = hypre_ParCSRMatrixOffd(parcsr);
1511   if (ha) {
1512     PetscInt    size,i;
1513     HYPRE_Int   *ii;
1514     PetscScalar *a;
1515 
1516     size = hypre_CSRMatrixNumRows(ha);
1517     a    = hypre_CSRMatrixData(ha);
1518     ii   = hypre_CSRMatrixI(ha);
1519     for (i = 0; i < ii[size]; i++) a[i] *= s;
1520   }
1521   PetscFunctionReturn(0);
1522 }
1523 
1524 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1525 {
1526   hypre_ParCSRMatrix *parcsr;
1527   HYPRE_Int          *lrows;
1528   PetscInt           rst,ren,i;
1529   PetscErrorCode     ierr;
1530 
1531   PetscFunctionBegin;
1532   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1533   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1534   ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
1535   ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1536   for (i=0;i<numRows;i++) {
1537     if (rows[i] < rst || rows[i] >= ren)
1538       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1539     lrows[i] = rows[i] - rst;
1540   }
1541   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1542   ierr = PetscFree(lrows);CHKERRQ(ierr);
1543   PetscFunctionReturn(0);
1544 }
1545 
1546 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1547 {
1548   PetscErrorCode      ierr;
1549 
1550   PetscFunctionBegin;
1551   if (ha) {
1552     HYPRE_Int     *ii, size;
1553     HYPRE_Complex *a;
1554 
1555     size = hypre_CSRMatrixNumRows(ha);
1556     a    = hypre_CSRMatrixData(ha);
1557     ii   = hypre_CSRMatrixI(ha);
1558 
1559     if (a) { ierr = PetscMemzero(a,(ii[size])*sizeof(HYPRE_Complex));CHKERRQ(ierr); }
1560   }
1561   PetscFunctionReturn(0);
1562 }
1563 
/* Zeros all stored entries of the matrix while keeping its nonzero pattern,
   by clearing both CSR blocks of the underlying ParCSR matrix. */
PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  hypre_ParCSRMatrix  *parcsr;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
  /* off-diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1577 
1578 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],PetscScalar diag)
1579 {
1580   PetscInt        ii, jj, ibeg, iend, irow;
1581   PetscInt        *i, *j;
1582   PetscScalar     *a;
1583 
1584   PetscFunctionBegin;
1585   if (!hA) PetscFunctionReturn(0);
1586 
1587   i = (PetscInt*) hypre_CSRMatrixI(hA);
1588   j = (PetscInt*) hypre_CSRMatrixJ(hA);
1589   a = hypre_CSRMatrixData(hA);
1590 
1591   for (ii = 0; ii < N; ii++) {
1592     irow = rows[ii];
1593     ibeg = i[irow];
1594     iend = i[irow+1];
1595     for (jj = ibeg; jj < iend; jj++)
1596       if (j[jj] == irow) a[jj] = diag;
1597       else a[jj] = 0.0;
1598    }
1599    PetscFunctionReturn(0);
1600 }
1601 
1602 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1603 {
1604   hypre_ParCSRMatrix  *parcsr;
1605   PetscInt            *lrows,len;
1606   PetscErrorCode      ierr;
1607 
1608   PetscFunctionBegin;
1609   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size \n");
1610   /* retrieve the internal matrix */
1611   ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1612   /* get locally owned rows */
1613   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1614   /* zero diagonal part */
1615   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,diag);CHKERRQ(ierr);
1616   /* zero off-diagonal part */
1617   ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);
1618 
1619   ierr = PetscFree(lrows);CHKERRQ(ierr);
1620   PetscFunctionReturn(0);
1621 }
1622 
/* Begins assembly: starts communicating stashed off-process entries to their
   owning ranks. Skipped entirely when the user has guaranteed (via
   MAT_NO_OFF_PROC_ENTRIES) that no such entries were set. */
static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1633 
/* Returns (borrowed) column indices and values of one global row via
   HYPRE_ParCSRMatrixGetRow; must be paired with MatRestoreRow_HYPRE.
   NOTE(review): the PetscInt* -> HYPRE_Int* casts assume PetscInt and
   HYPRE_Int have the same size -- confirm against the configure-time
   integer-size matching between PETSc and hypre. */
static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
  PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
  PetscFunctionReturn(0);
}
1646 
/* Releases the row data handed out by MatGetRow_HYPRE via
   HYPRE_ParCSRMatrixRestoreRow (hypre requires the matching restore call
   before another row can be fetched). */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix  *parcsr;
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
  PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
  PetscFunctionReturn(0);
}
1659 
1660 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1661 {
1662   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1663   PetscInt  i;
1664 
1665   PetscFunctionBegin;
1666   if (!m || !n) PetscFunctionReturn(0);
1667   /* Ignore negative row indices
1668    * And negative column indices should be automatically ignored in hypre
1669    * */
1670   for (i=0; i<m; i++)
1671     if (idxm[i] >= 0) PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,(HYPRE_Int*)&n,(HYPRE_Int*)&idxm[i],(HYPRE_Int*)idxn,&v[i*n]));
1672   PetscFunctionReturn(0);
1673 }
1674 
1675 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1676 {
1677   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1678 
1679   PetscFunctionBegin;
1680   switch (op)
1681   {
1682   case MAT_NO_OFF_PROC_ENTRIES:
1683     if (flg) {
1684       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1685     }
1686     break;
1687   default:
1688     break;
1689   }
1690   PetscFunctionReturn(0);
1691 }
1692 
1693 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1694 {
1695   hypre_ParCSRMatrix *parcsr;
1696   PetscErrorCode     ierr;
1697   Mat                B;
1698   PetscViewerFormat  format;
1699   PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;
1700 
1701   PetscFunctionBegin;
1702   ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
1703   if (format != PETSC_VIEWER_NATIVE) {
1704     ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
1705     ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
1706     ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr);
1707     if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr);
1708     ierr = (*mview)(B,view);CHKERRQ(ierr);
1709     ierr = MatDestroy(&B);CHKERRQ(ierr);
1710   } else {
1711     Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
1712     PetscMPIInt size;
1713     PetscBool   isascii;
1714     const char *filename;
1715 
1716     /* HYPRE uses only text files */
1717     ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
1718     if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1719     ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr);
1720     PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
1721     ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr);
1722     if (size > 1) {
1723       ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr);
1724     } else {
1725       ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr);
1726     }
1727   }
1728   PetscFunctionReturn(0);
1729 }
1730 
/* Duplicates a MATHYPRE matrix. For MAT_DO_NOT_COPY_VALUES and
   MAT_SHARE_NONZERO_PATTERN a new ParCSR matrix with the same structure is
   built with hypre_ParCSRMatrixCompleteClone() and owned by the new Mat.
   NOTE(review): the pattern is cloned rather than shared, so
   MAT_SHARE_NONZERO_PATTERN behaves like MAT_DO_NOT_COPY_VALUES here.
   For MAT_COPY_VALUES, MatCreateFromParCSR() deep-copies the original. */
static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;
  PetscCopyMode      cpmode;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
    /* structural clone; B takes ownership of the new ParCSR matrix */
    parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
    cpmode = PETSC_OWN_POINTER;
  } else {
    /* MAT_COPY_VALUES: MatCreateFromParCSR copies from the original */
    cpmode = PETSC_COPY_VALUES;
  }
  ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1748 
/* Copies A into B. When the caller guarantees identical nonzero patterns and
   B uses this same copy implementation (i.e. both are MATHYPRE), the values
   are copied directly between the underlying ParCSR matrices
   (hypre_ParCSRMatrixCopy with copy_data = 1); otherwise the generic
   entry-by-entry MatCopy_Basic is used. */
static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
{
  hypre_ParCSRMatrix *acsr,*bcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
    ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr);
    ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr);
    PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
    ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1766 
/* Extracts the diagonal of the matrix into Vec d. Requires congruent row and
   column layouts; in debug builds it also verifies no diagonal entry is
   missing. The loop below reads the FIRST stored entry of each local row of
   the diagonal block (data[diag[i]] with diag[i] the row start).
   NOTE(review): this relies on hypre's convention of storing the diagonal
   entry first in each row of the diag block -- confirm if hypre changes.
   NOTE(review): the PetscInt*/PetscScalar* casts assume PetscInt/HYPRE_Int
   and PetscScalar/HYPRE_Complex have matching sizes -- TODO confirm. */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  PetscScalar        *a,*data = NULL;
  PetscInt           i,*diag = NULL;
  PetscBool          cong;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
  if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
#if defined(PETSC_USE_DEBUG)
  {
    PetscBool miss;
    ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
#endif
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    ierr = VecGetArray(d,&a);CHKERRQ(ierr);
    diag = (PetscInt*)(hypre_CSRMatrixI(dmat));
    data = (PetscScalar*)(hypre_CSRMatrixData(dmat));
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    ierr = VecRestoreArray(d,&a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1797 
1798 #include <petscblaslapack.h>
1799 
1800 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
1801 {
1802   PetscErrorCode ierr;
1803 
1804   PetscFunctionBegin;
1805   if (str == SAME_NONZERO_PATTERN) {
1806     hypre_ParCSRMatrix *x,*y;
1807     hypre_CSRMatrix    *xloc,*yloc;
1808     PetscInt           xnnz,ynnz;
1809     PetscScalar        *xarr,*yarr;
1810     PetscBLASInt       one=1,bnz;
1811 
1812     ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
1813     ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);
1814 
1815     /* diagonal block */
1816     xloc = hypre_ParCSRMatrixDiag(x);
1817     yloc = hypre_ParCSRMatrixDiag(y);
1818     xnnz = 0;
1819     ynnz = 0;
1820     xarr = NULL;
1821     yarr = NULL;
1822     if (xloc) {
1823       xarr = (PetscScalar*)(hypre_CSRMatrixData(xloc));
1824       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1825     }
1826     if (yloc) {
1827       yarr = (PetscScalar*)(hypre_CSRMatrixData(yloc));
1828       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1829     }
1830     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
1831     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
1832     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,xarr,&one,yarr,&one));
1833 
1834     /* off-diagonal block */
1835     xloc = hypre_ParCSRMatrixOffd(x);
1836     yloc = hypre_ParCSRMatrixOffd(y);
1837     xnnz = 0;
1838     ynnz = 0;
1839     xarr = NULL;
1840     yarr = NULL;
1841     if (xloc) {
1842       xarr = (PetscScalar*)(hypre_CSRMatrixData(xloc));
1843       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1844     }
1845     if (yloc) {
1846       yarr = (PetscScalar*)(hypre_CSRMatrixData(yloc));
1847       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1848     }
1849     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
1850     ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
1851     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,xarr,&one,yarr,&one));
1852   } else if (str == SUBSET_NONZERO_PATTERN) {
1853     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1854   } else {
1855     Mat B;
1856 
1857     ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr);
1858     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
1859     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
1860   }
1861   PetscFunctionReturn(0);
1862 }
1863 
1864 /*MC
1865    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
1866           based on the hypre IJ interface.
1867 
1868    Level: intermediate
1869 
1870 .seealso: MatCreate()
1871 M*/
1872 
/* Type constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
   operation table, creates the stash used for off-process entries, duplicates
   the communicator for hypre's use, and composes the type-specific methods
   (conversions, PtAP variants, preallocation, ParCSR access). */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;   /* this Mat owns (and frees) the hypre objects */
  hB->available  = PETSC_TRUE;
  hB->size       = 0;
  hB->array      = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  /* start from an empty ops table and fill in what MATHYPRE supports */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult             = MatMult_HYPRE;
  B->ops->multtranspose    = MatMultTranspose_HYPRE;
  B->ops->multadd          = MatMultAdd_HYPRE;
  B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
  B->ops->setup            = MatSetUp_HYPRE;
  B->ops->destroy          = MatDestroy_HYPRE;
  B->ops->assemblyend      = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin    = MatAssemblyBegin_HYPRE;
  B->ops->ptap             = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult          = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues        = MatSetValues_HYPRE;
  B->ops->missingdiagonal  = MatMissingDiagonal_HYPRE;
  B->ops->scale            = MatScale_HYPRE;
  B->ops->zerorowscolumns  = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries      = MatZeroEntries_HYPRE;
  B->ops->zerorows         = MatZeroRows_HYPRE;
  B->ops->getrow           = MatGetRow_HYPRE;
  B->ops->restorerow       = MatRestoreRow_HYPRE;
  B->ops->getvalues        = MatGetValues_HYPRE;
  B->ops->setoption        = MatSetOption_HYPRE;
  B->ops->duplicate        = MatDuplicate_HYPRE;
  B->ops->copy             = MatCopy_HYPRE;
  B->ops->view             = MatView_HYPRE;
  B->ops->getdiagonal      = MatGetDiagonal_HYPRE;
  B->ops->axpy             = MatAXPY_HYPRE;

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* hypre gets its own duplicated communicator to avoid tag collisions */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1930 
1931 static PetscErrorCode hypre_array_destroy(void *ptr)
1932 {
1933    PetscFunctionBegin;
1934    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
1935    PetscFunctionReturn(0);
1936 }
1937