xref: /petsc/src/mat/impls/hypre/mhypre.c (revision 609bdbee21ea3be08735c64dbe00a9ab27759925)
1 
/*
    Creates a hypre IJ matrix from a PETSc matrix
*/
5 
6 #include <petscmathypre.h>
7 #include <petsc/private/matimpl.h>
8 #include <../src/mat/impls/hypre/mhypre.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <../src/vec/vec/impls/hypre/vhyp.h>
11 #include <HYPRE.h>
12 #include <HYPRE_utilities.h>
13 #include <_hypre_parcsr_ls.h>
14 
15 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
16 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
17 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
18 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
19 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
20 static PetscErrorCode hypre_array_destroy(void*);
21 
22 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
23 {
24   PetscErrorCode ierr;
25   PetscInt       i,n_d,n_o;
26   const PetscInt *ia_d,*ia_o;
27   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
28   PetscInt       *nnz_d=NULL,*nnz_o=NULL;
29 
30   PetscFunctionBegin;
31   if (A_d) { /* determine number of nonzero entries in local diagonal part */
32     ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
33     if (done_d) {
34       ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
35       for (i=0; i<n_d; i++) {
36         nnz_d[i] = ia_d[i+1] - ia_d[i];
37       }
38     }
39     ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
40   }
41   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
42     ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
43     if (done_o) {
44       ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
45       for (i=0; i<n_o; i++) {
46         nnz_o[i] = ia_o[i+1] - ia_o[i];
47       }
48     }
49     ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
50   }
51   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
52     if (!done_o) { /* only diagonal part */
53       ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
54       for (i=0; i<n_d; i++) {
55         nnz_o[i] = 0;
56       }
57     }
58     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
59     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
60     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
61   }
62   PetscFunctionReturn(0);
63 }
64 
65 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
66 {
67   PetscErrorCode ierr;
68   PetscInt       rstart,rend,cstart,cend;
69 
70   PetscFunctionBegin;
71   ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
72   ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
73   rstart = A->rmap->rstart;
74   rend   = A->rmap->rend;
75   cstart = A->cmap->rstart;
76   cend   = A->cmap->rend;
77   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
78   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
79   {
80     PetscBool      same;
81     Mat            A_d,A_o;
82     const PetscInt *colmap;
83     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
84     if (same) {
85       ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
86       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
87       PetscFunctionReturn(0);
88     }
89     ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
90     if (same) {
91       ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
92       ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
93       PetscFunctionReturn(0);
94     }
95     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
96     if (same) {
97       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
98       PetscFunctionReturn(0);
99     }
100     ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
101     if (same) {
102       ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
103       PetscFunctionReturn(0);
104     }
105   }
106   PetscFunctionReturn(0);
107 }
108 
109 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
110 {
111   PetscErrorCode    ierr;
112   PetscInt          i,rstart,rend,ncols,nr,nc;
113   const PetscScalar *values;
114   const PetscInt    *cols;
115   PetscBool         flg;
116 
117   PetscFunctionBegin;
118   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
119   ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
120   if (flg && nr == nc) {
121     ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
122     PetscFunctionReturn(0);
123   }
124   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
125   if (flg) {
126     ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
127     PetscFunctionReturn(0);
128   }
129 
130   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
131   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
132   for (i=rstart; i<rend; i++) {
133     ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
134     if (ncols) {
135       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
136     }
137     ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
138   }
139   PetscFunctionReturn(0);
140 }
141 
142 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
143 {
144   PetscErrorCode        ierr;
145   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
146   HYPRE_Int             type;
147   hypre_ParCSRMatrix    *par_matrix;
148   hypre_AuxParCSRMatrix *aux_matrix;
149   hypre_CSRMatrix       *hdiag;
150 
151   PetscFunctionBegin;
152   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
153   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
154   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
155   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
156   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
157   /*
158        this is the Hack part where we monkey directly with the hypre datastructures
159   */
160   ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
161   ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
162   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
163 
164   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
165   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
166   PetscFunctionReturn(0);
167 }
168 
169 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
170 {
171   PetscErrorCode        ierr;
172   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
173   Mat_SeqAIJ            *pdiag,*poffd;
174   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
175   HYPRE_Int             type;
176   hypre_ParCSRMatrix    *par_matrix;
177   hypre_AuxParCSRMatrix *aux_matrix;
178   hypre_CSRMatrix       *hdiag,*hoffd;
179 
180   PetscFunctionBegin;
181   pdiag = (Mat_SeqAIJ*) pA->A->data;
182   poffd = (Mat_SeqAIJ*) pA->B->data;
183   /* cstart is only valid for square MPIAIJ layed out in the usual way */
184   ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);
185 
186   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
187   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
188   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
189   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
190   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
191   hoffd = hypre_ParCSRMatrixOffd(par_matrix);
192 
193   /*
194        this is the Hack part where we monkey directly with the hypre datastructures
195   */
196   ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
197   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
198   jj  = (PetscInt*)hdiag->j;
199   pjj = (PetscInt*)pdiag->j;
200   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
201   ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
202   ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
203   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
204      If we hacked a hypre a bit more we might be able to avoid this step */
205   jj  = (PetscInt*) hoffd->j;
206   pjj = (PetscInt*) poffd->j;
207   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
208   ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
209 
210   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
211   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
212   PetscFunctionReturn(0);
213 }
214 
/*
   MatConvert_HYPRE_IS - converts a MATHYPRE matrix (ParCSR only) to MATIS.

   The local matrix on each process is built by merging the hypre diagonal
   and off-diagonal CSR blocks row by row; off-diagonal columns are numbered
   after the dc diagonal columns, and the column local-to-global map
   translates back to the global numbering.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;                 /* local (on-process) matrix of the MATIS */
  ISLocalToGlobalMapping rl2g,cl2g;          /* row/column local-to-global mappings */
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* global sizes, first local row/column, and raw CSR arrays of both blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* column map: local columns 0..dc-1 are the diagonal block, dc..dc+oc-1
       the off-diagonal block in hypre's col_map_offd ordering */
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: write into the existing local matrix's CSR arrays after sanity checks
       NOTE(review): no matching MatRestoreRowIJ appears in this block — confirm
       the mapping is intentionally left open until assembly */
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk the diag and offd row pointers in lockstep,
     appending diag columns as-is and offd columns shifted by dc, then sort
     each merged row by column index */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad the row pointer for any remaining (empty) trailing rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
320 
321 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
322 {
323   Mat_HYPRE      *hB;
324   MPI_Comm       comm = PetscObjectComm((PetscObject)A);
325   PetscErrorCode ierr;
326 
327   PetscFunctionBegin;
328   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
329   if (reuse == MAT_REUSE_MATRIX) {
330     /* always destroy the old matrix and create a new memory;
331        hope this does not churn the memory too much. The problem
332        is I do not know if it is possible to put the matrix back to
333        its initial state so that we can directly copy the values
334        the second time through. */
335     hB = (Mat_HYPRE*)((*B)->data);
336     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
337   } else {
338     ierr = MatCreate(comm,B);CHKERRQ(ierr);
339     ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
340     ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
341     hB   = (Mat_HYPRE*)((*B)->data);
342   }
343   ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
344   ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
345   (*B)->preallocated = PETSC_TRUE;
346   ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
347   ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
348   PetscFunctionReturn(0);
349 }
350 
/*
   MatConvert_HYPRE_AIJ - converts a MATHYPRE matrix (ParCSR only) to
   MATSEQAIJ (one process) or MATMPIAIJ (parallel).

   Three modes:
   - MAT_INITIAL_MATRIX: allocates fresh CSR arrays and copies the hypre data;
   - MAT_REUSE_MATRIX:   writes into the existing AIJ matrix's arrays after
                         checking sizes/nonzero counts are compatible;
   - MAT_INPLACE_MATRIX: steals the hypre CSR arrays (zero-copy) and replaces
                         A's header; the arrays are later freed with hypre's
                         allocator via PetscContainer destroy callbacks.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;      /* diag/offd values and a cursor for sorting */
  PetscInt           *dii,*djj,*oii,*ojj,*iptr; /* diag/offd CSR index arrays */
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain diag CSR destination arrays according to the reuse mode */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    /* NOTE(review): no matching MatRestoreRowIJ for these MatGetRowIJ calls is
       visible in this function — confirm this is intentional */
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    /* steal hypre's arrays directly; the memcpys below are then self-copies */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* AIJ requires column indices sorted within each row; hypre does not */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    /* same three-way dance for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* translate hypre's compressed offd column indices to global indices,
       which is what MatCreateMPIAIJWithSplitArrays expects */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the CSR arrays to the new matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the stolen arrays so the hypre destructor does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the CSR arrays to the new matrix */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    /* the stolen arrays were allocated by hypre: attach them to the result in
       containers whose destroy callback (hypre_array_destroy) frees them with
       hypre's allocator when the matrix is destroyed */
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
519 
520 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
521 {
522   hypre_ParCSRMatrix *tA;
523   hypre_CSRMatrix    *hdiag,*hoffd;
524   Mat_SeqAIJ         *diag,*offd;
525   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
526   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
527   PetscBool          ismpiaij,isseqaij;
528   PetscErrorCode     ierr;
529 
530   PetscFunctionBegin;
531   ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
532   ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
533   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
534   if (ismpiaij) {
535     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
536 
537     diag   = (Mat_SeqAIJ*)a->A->data;
538     offd   = (Mat_SeqAIJ*)a->B->data;
539     garray = a->garray;
540     noffd  = a->B->cmap->N;
541     dnnz   = diag->nz;
542     onnz   = offd->nz;
543   } else {
544     diag   = (Mat_SeqAIJ*)A->data;
545     offd   = NULL;
546     garray = NULL;
547     noffd  = 0;
548     dnnz   = diag->nz;
549     onnz   = 0;
550   }
551 
552   /* create a temporary ParCSR */
553   if (HYPRE_AssumedPartitionCheck()) {
554     PetscMPIInt myid;
555 
556     ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
557     row_starts = A->rmap->range + myid;
558     col_starts = A->cmap->range + myid;
559   } else {
560     row_starts = A->rmap->range;
561     col_starts = A->cmap->range;
562   }
563   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
564   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
565   hypre_ParCSRMatrixSetColStartsOwner(tA,0);
566 
567   /* set diagonal part */
568   hdiag = hypre_ParCSRMatrixDiag(tA);
569   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
570   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
571   hypre_CSRMatrixData(hdiag)        = diag->a;
572   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
573   hypre_CSRMatrixSetRownnz(hdiag);
574   hypre_CSRMatrixSetDataOwner(hdiag,0);
575 
576   /* set offdiagonal part */
577   hoffd = hypre_ParCSRMatrixOffd(tA);
578   if (offd) {
579     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
580     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
581     hypre_CSRMatrixData(hoffd)        = offd->a;
582     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
583     hypre_CSRMatrixSetRownnz(hoffd);
584     hypre_CSRMatrixSetDataOwner(hoffd,0);
585     hypre_ParCSRMatrixSetNumNonzeros(tA);
586     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
587   }
588   *hA = tA;
589   PetscFunctionReturn(0);
590 }
591 
592 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
593 {
594   hypre_CSRMatrix    *hdiag,*hoffd;
595 
596   PetscFunctionBegin;
597   hdiag = hypre_ParCSRMatrixDiag(*hA);
598   hoffd = hypre_ParCSRMatrixOffd(*hA);
599   /* set pointers to NULL before destroying tA */
600   hypre_CSRMatrixI(hdiag)           = NULL;
601   hypre_CSRMatrixJ(hdiag)           = NULL;
602   hypre_CSRMatrixData(hdiag)        = NULL;
603   hypre_CSRMatrixI(hoffd)           = NULL;
604   hypre_CSRMatrixJ(hoffd)           = NULL;
605   hypre_CSRMatrixData(hoffd)        = NULL;
606   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
607   hypre_ParCSRMatrixDestroy(*hA);
608   *hA = NULL;
609   PetscFunctionReturn(0);
610 }
611 
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  /* NOTE(review): ierr has no explicit assignment here; it is presumably
     consumed by the PetscStackCallStandard macro expansion — confirm */
  PetscErrorCode ierr;
  HYPRE_Int      P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember the ownership flags so they can be restored after the product
     steals them (see below) */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}
634 
/*
   Computes C = P^T * A * P for AIJ matrices by wrapping A and P as hypre
   ParCSR matrices (zero-copy) and delegating to BoomerAMG's RAP kernel.
*/
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* zero-copy hypre views of A and P */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  /* B takes ownership of hPtAP; merge its contents into the caller's C */
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
651 
652 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
653 {
654   PetscErrorCode     ierr;
655   PetscFunctionBegin;
656   ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
657   ierr                   = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
658   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
659   PetscFunctionReturn(0);
660 }
661 
/*
   Computes C = P^T * A * P where A is AIJ and P is MATHYPRE (ParCSR).
   A is wrapped zero-copy as a ParCSR; the result type is C's current type.
*/
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* zero-copy hypre view of A; released right after the product */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
689 
/*
   PtAP driver for A of type AIJ with P of type MATHYPRE. On the initial call
   the result type can be selected with -matptap_hypre_outtype (default AIJ);
   all the work is done in the numeric phase.
*/
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    /* let the user pick the output matrix type from the options database */
    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
718 
/* Numeric phase of C = P^T*A*P when both A and P are MATHYPRE.
   The product is computed entirely inside hypre (via MatHYPRE_ParCSR_RAP) and
   the resulting ParCSR matrix is wrapped into a temporary MATHYPRE whose
   contents are then merged into the user's C. */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* both operands must be MATHYPRE and must wrap a ParCSR object */
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  /* RAP with R = P computes P^T*A*P; the result is owned by the temporary B */
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  /* NOTE(review): MatMatMultNumeric_HYPRE_HYPRE uses MatHeaderReplace here
     because "HeaderMerge messes up with the communicator" — confirm whether
     the same concern applies to this Merge */
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
746 
747 static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
748 {
749   PetscErrorCode ierr;
750 
751   PetscFunctionBegin;
752   if (scall == MAT_INITIAL_MATRIX) {
753     ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
754     ierr                   = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
755     ierr                   = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
756     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
757     ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
758   }
759   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
760   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
761   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
762   PetscFunctionReturn(0);
763 }
764 
/* calls hypre_ParMatmul
   hypre_ParMatmul uses hypre_ParCSRMatrixCreate with the communicator of hA;
   hypre_ParCSRMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for hypre_BoomerAMGBuildCoarseOperator to work */
771 static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
772 {
773   PetscFunctionBegin;
774   PetscStackPush("hypre_ParMatmul");
775   *hAB = hypre_ParMatmul(hA,hB);
776   PetscStackPop;
777   PetscFunctionReturn(0);
778 }
779 
780 static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
781 {
782   Mat                D;
783   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
784   PetscErrorCode     ierr;
785 
786   PetscFunctionBegin;
787   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
788   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
789   ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
790   ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
791   ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
792   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
793   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
794   PetscFunctionReturn(0);
795 }
796 
797 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
798 {
799   PetscErrorCode ierr;
800 
801   PetscFunctionBegin;
802   ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
803   ierr                      = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
804   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
805   PetscFunctionReturn(0);
806 }
807 
808 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
809 {
810   Mat                D;
811   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
812   Mat_HYPRE          *hA,*hB;
813   PetscBool          ishypre;
814   HYPRE_Int          type;
815   PetscErrorCode     ierr;
816 
817   PetscFunctionBegin;
818   ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
819   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
820   ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
821   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
822   hA = (Mat_HYPRE*)A->data;
823   hB = (Mat_HYPRE*)B->data;
824   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
825   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
826   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
827   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
828   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
829   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
830   ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
831   ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
832   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
833   ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
834   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
835   PetscFunctionReturn(0);
836 }
837 
838 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
839 {
840   PetscErrorCode ierr;
841 
842   PetscFunctionBegin;
843   if (scall == MAT_INITIAL_MATRIX) {
844     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
845     ierr                      = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
846     ierr                      = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
847     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
848     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
849   }
850   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
851   ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
852   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
857 {
858   Mat                E;
859   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
860   PetscErrorCode     ierr;
861 
862   PetscFunctionBegin;
863   ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
864   ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
865   ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
866   ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
867   ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
868   ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
869   ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
870   ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
871   ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
872   PetscFunctionReturn(0);
873 }
874 
875 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
876 {
877   PetscErrorCode ierr;
878 
879   PetscFunctionBegin;
880   ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
881   ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 
885 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
886 {
887   PetscErrorCode ierr;
888 
889   PetscFunctionBegin;
890   ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
891   PetscFunctionReturn(0);
892 }
893 
894 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
895 {
896   PetscErrorCode ierr;
897 
898   PetscFunctionBegin;
899   ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
900   PetscFunctionReturn(0);
901 }
902 
/* Shared kernel for MatMult/MatMultTranspose on MATHYPRE.
   Computes y = A x (trans == PETSC_FALSE) or y = A^T x (trans == PETSC_TRUE)
   without copying data: the PETSc vector arrays are temporarily swapped into
   the auxiliary hypre vectors hA->x / hA->b (created in MatAssemblyEnd_HYPRE),
   the hypre matvec runs, and the original hypre pointers are restored.
   The swap order is significant: each ReplacePointer saves the previous hypre
   array in the third argument, which is what gets put back afterwards. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say; /* sax/say hold the saved hypre arrays during the swap */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    /* for A^T x the roles are reversed: input goes into hy (row space),
       output comes out of hx (column space) */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
934 
935 static PetscErrorCode MatDestroy_HYPRE(Mat A)
936 {
937   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
938   PetscErrorCode ierr;
939 
940   PetscFunctionBegin;
941   if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
942   if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
943   if (hA->ij) {
944     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
945     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
946   }
947   if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
948   ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
949   ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
950   ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
951   ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
952   ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
953   ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
954   ierr = PetscFree(A->data);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
958 static PetscErrorCode MatSetUp_HYPRE(Mat A)
959 {
960   PetscErrorCode ierr;
961 
962   PetscFunctionBegin;
963   ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
964   PetscFunctionReturn(0);
965 }
966 
967 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
968 {
969   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
970   Vec                x,b;
971   PetscErrorCode     ierr;
972 
973   PetscFunctionBegin;
974   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
975   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
976   if (hA->x) PetscFunctionReturn(0);
977   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
978   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
979   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
980   ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
981   ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
982   ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
983   ierr = VecDestroy(&x);CHKERRQ(ierr);
984   ierr = VecDestroy(&b);CHKERRQ(ierr);
985   PetscFunctionReturn(0);
986 }
987 
988 #define MATHYPRE_SCRATCH 2048
989 
990 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
991 {
992   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
993   PetscScalar        *vals = (PetscScalar *)v;
994   PetscScalar        sscr[MATHYPRE_SCRATCH];
995   HYPRE_Int          cscr[2][MATHYPRE_SCRATCH];
996   HYPRE_Int          i,nzc;
997   PetscErrorCode     ierr;
998 
999   PetscFunctionBegin;
1000   for (i=0,nzc=0;i<nc;i++) {
1001     if (cols[i] >= 0) {
1002       cscr[0][nzc  ] = cols[i];
1003       cscr[1][nzc++] = i;
1004     }
1005   }
1006   if (!nzc) PetscFunctionReturn(0);
1007 
1008   if (ins == ADD_VALUES) {
1009     for (i=0;i<nr;i++) {
1010       if (rows[i] >= 0 && nzc) {
1011         PetscInt j;
1012         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1013         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1014       }
1015       vals += nc;
1016     }
1017   } else { /* INSERT_VALUES */
1018 #if defined(PETSC_USE_DEBUG)
1019     /* Insert values cannot be used to insert offproc entries */
1020     PetscInt rst,ren;
1021     ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
1022     for (i=0;i<nr;i++)
1023       if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. Use ADD_VALUES instead");
1024 #endif
1025     for (i=0;i<nr;i++) {
1026       if (rows[i] >= 0 && nzc) {
1027         PetscInt j;
1028         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1029         PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1030       }
1031       vals += nc;
1032     }
1033   }
1034   PetscFunctionReturn(0);
1035 }
1036 
1037 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1038 {
1039   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1040   HYPRE_Int          *hdnnz,*honnz;
1041   PetscInt           i,rs,re,cs,ce,bs;
1042   PetscMPIInt        size;
1043   PetscErrorCode     ierr;
1044 
1045   PetscFunctionBegin;
1046   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1047   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1048   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1049   rs   = A->rmap->rstart;
1050   re   = A->rmap->rend;
1051   cs   = A->cmap->rstart;
1052   ce   = A->cmap->rend;
1053   if (!hA->ij) {
1054     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1055     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1056   } else {
1057     HYPRE_Int hrs,hre,hcs,hce;
1058     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1059     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
1060     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
1061   }
1062   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1063 
1064   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1065   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1066 
1067   if (!dnnz) {
1068     ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
1069     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1070   } else {
1071     hdnnz = (HYPRE_Int*)dnnz;
1072   }
1073   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1074   if (size > 1) {
1075     if (!onnz) {
1076       ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
1077       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1078     } else {
1079       honnz = (HYPRE_Int*)onnz;
1080     }
1081     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1082   } else {
1083     honnz = NULL;
1084     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1085   }
1086   if (!dnnz) {
1087     ierr = PetscFree(hdnnz);CHKERRQ(ierr);
1088   }
1089   if (!onnz && honnz) {
1090     ierr = PetscFree(honnz);CHKERRQ(ierr);
1091   }
1092   A->preallocated = PETSC_TRUE;
1093 
1094   /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
1095   {
1096     hypre_AuxParCSRMatrix *aux_matrix;
1097     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1098     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1099   }
1100   PetscFunctionReturn(0);
1101 }
1102 
1103 /*@C
1104    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1105 
1106    Collective on Mat
1107 
1108    Input Parameters:
1109 +  A - the matrix
1110 .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1111           (same value is used for all local rows)
1112 .  dnnz - array containing the number of nonzeros in the various rows of the
1113           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1115           The size of this array is equal to the number of local rows, i.e 'm'.
1116           For matrices that will be factored, you must leave room for (and set)
1117           the diagonal entry even if it is zero.
1118 .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1119           submatrix (same value is used for all local rows).
1120 -  onnz - array containing the number of nonzeros in the various rows of the
1121           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1123           structure. The size of this array is equal to the number
1124           of local rows, i.e 'm'.
1125 
1126    Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1127 
1128    Level: intermediate
1129 
1130 .keywords: matrix, aij, compressed row, sparse, parallel
1131 
1132 .seealso: MatCreate(), MatMPIAIJSetPreallocation, MATHYPRE
1133 @*/
1134 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1135 {
1136   PetscErrorCode ierr;
1137 
1138   PetscFunctionBegin;
1139   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1140   PetscValidType(A,1);
1141   ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 /*
1146    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1147 
1148    Collective
1149 
1150    Input Parameters:
1151 +  vparcsr  - the pointer to the hypre_ParCSRMatrix
1152 .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1153 -  copymode - PETSc copying options
1154 
1155    Output Parameter:
1156 .  A  - the matrix
1157 
1158    Level: intermediate
1159 
.seealso: MATHYPRE, PetscCopyMode
1161 */
/* Wraps (or converts) a hypre_ParCSRMatrix into a PETSc Mat of type MATAIJ,
   MATIS or MATHYPRE. A temporary MATHYPRE T is always built first, directly
   referencing the ParCSR object; for AIJ/IS output T is then converted and
   destroyed. copymode controls ownership of the ParCSR data (PETSC_COPY_VALUES
   is rejected for MATHYPRE output). */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  hypre_ParCSRMatrix    *parcsr;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  /* seq/mpi/plain AIJ are all handled through the same conversion path */
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix; hypre row/col indices are inclusive */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object: the IJ matrix references the caller's data directly */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag (the ParCSR data is already complete) */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      /* NOTE(review): cast assumes HYPRE_Int and PetscInt have the same size — confirm */
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    /* inner_free must be adjusted before T (and thus the IJ matrix) is destroyed */
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1256 
1257 PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1258 {
1259   Mat_HYPRE*            hA = (Mat_HYPRE*)A->data;
1260   HYPRE_Int             type;
1261   PetscErrorCode        ierr;
1262 
1263   PetscFunctionBegin;
1264   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1265   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1266   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1267   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1268   PetscFunctionReturn(0);
1269 }
1270 
1271 /*
1272    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1273 
1274    Not collective
1275 
   Input Parameter:
.  A  - the MATHYPRE object
1278 
1279    Output Parameter:
1280 .  parcsr  - the pointer to the hypre_ParCSRMatrix
1281 
1282    Level: intermediate
1283 
.seealso: MATHYPRE, MatCreateFromParCSR()
1285 */
1286 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1287 {
1288   PetscErrorCode ierr;
1289 
1290   PetscFunctionBegin;
1291   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1292   PetscValidType(A,1);
1293   ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
1294   PetscFunctionReturn(0);
1295 }
1296 
1297 /*MC
1298    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
1299           based on the hypre IJ interface.
1300 
1301    Level: intermediate
1302 
1303 .seealso: MatCreate()
1304 M*/
1305 
1306 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
1307 {
1308   Mat_HYPRE      *hB;
1309   PetscErrorCode ierr;
1310 
1311   PetscFunctionBegin;
1312   ierr           = PetscNewLog(B,&hB);CHKERRQ(ierr);
1313   hB->inner_free = PETSC_TRUE;
1314 
1315   B->data       = (void*)hB;
1316   B->rmap->bs   = 1;
1317   B->assembled  = PETSC_FALSE;
1318 
1319   B->ops->mult          = MatMult_HYPRE;
1320   B->ops->multtranspose = MatMultTranspose_HYPRE;
1321   B->ops->setup         = MatSetUp_HYPRE;
1322   B->ops->destroy       = MatDestroy_HYPRE;
1323   B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
1324   B->ops->ptap          = MatPtAP_HYPRE_HYPRE;
1325   B->ops->matmult       = MatMatMult_HYPRE_HYPRE;
1326   B->ops->setvalues     = MatSetValues_HYPRE;
1327 
1328   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
1329   ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
1330   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
1331   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
1332   ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
1333   ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
1334   ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
1335   ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
1336   PetscFunctionReturn(0);
1337 }
1338 
1339 static PetscErrorCode hypre_array_destroy(void *ptr)
1340 {
1341    PetscFunctionBegin;
1342    hypre_TFree(ptr);
1343    PetscFunctionReturn(0);
1344 }
1345