/*
    Creates hypre ijmatrix from PETSc matrix
*/

/*MC
   MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
          based on the hypre IJ interface.

   Level: intermediate

.seealso: MatCreate()
M*/
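/*
   Illustrative usage (a minimal sketch, not part of this file): a MATHYPRE matrix is
   typically obtained either by converting an assembled AIJ matrix with MatConvert(), or
   by creating it directly and filling it with MatSetValues(). Error checking is omitted
   and m, n, M, N, dnz, onz are placeholders.

     Mat A,H;
     ...                                             // A is an assembled MATAIJ matrix
     MatConvert(A,MATHYPRE,MAT_INITIAL_MATRIX,&H);   // copy A into the hypre IJ format

     Mat B;
     MatCreate(PETSC_COMM_WORLD,&B);
     MatSetSizes(B,m,n,M,N);
     MatSetType(B,MATHYPRE);
     MatHYPRESetPreallocation(B,dnz,NULL,onz,NULL);
     // MatSetValues(B,...), then:
     MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
*/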
#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>

static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
static PetscErrorCode hypre_array_destroy(void*);

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixPreallocate"
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscErrorCode ierr;
  PetscInt       i,n_d,n_o;
  const PetscInt *ia_d,*ia_o;
  PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
  PetscInt       *nnz_d=NULL,*nnz_o=NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
    if (done_d) {
      ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_d[i] = ia_d[i+1] - ia_d[i];
      }
    }
    ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
    if (done_o) {
      ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_o; i++) {
        nnz_o[i] = ia_o[i+1] - ia_o[i];
      }
    }
    ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
  }
  if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part */
      ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_o[i] = 0;
      }
    }
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int*)nnz_d,(HYPRE_Int*)nnz_o));
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_CreateFromMat"
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,cstart,cend;

  PetscFunctionBegin;
  ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixCopy"
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  if (flg && nr == nc) {
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    if (ncols) {
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int*)&ncols,(HYPRE_Int*)&i,(HYPRE_Int*)cols,values));
    }
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_SeqAIJ"
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre data structures
  */
  ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixFastCopy_MPIAIJ"
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre data structures
  */
  ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  jj  = (PetscInt*)hdiag->j;
  pjj = (PetscInt*)pdiag->j;
  for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
  ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));CHKERRQ(ierr);
  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this;
     if we hacked hypre a bit more we might be able to avoid this step */
  jj  = (PetscInt*) hoffd->j;
  pjj = (PetscInt*) poffd->j;
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
  ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));CHKERRQ(ierr);

  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_IS"
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE              *mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices */
  ii  = iptr;
  jj  = jptr;
  aa  = data;
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr    = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr       = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvert_AIJ_HYPRE"
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create new memory;
       we hope this does not churn the memory too much. The problem
       is that we do not know if it is possible to put the matrix back into
       its initial state so that we could directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)((*B)->data);
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);

  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

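/*
   Illustrative usage of the conversion above (a minimal sketch, not part of this file):
   an assembled AIJ matrix A can be converted to MATHYPRE and, after its values change,
   converted again reusing the previously created B; MAT_INPLACE_MATRIX is not
   supported. Error checking omitted; A is a placeholder.

     Mat B;
     MatConvert(A,MATHYPRE,MAT_INITIAL_MATRIX,&B);   // first conversion allocates B
     ...                                             // modify the entries of A
     MatConvert(A,MATHYPRE,MAT_REUSE_MATRIX,&B);     // recreates the hypre IJ matrix inside B
     MatDestroy(&B);
*/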
#undef __FUNCT__
#define __FUNCT__ "MatConvert_HYPRE_AIJ"
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr  = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void       *ptrs[6]  = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAIJGetParCSR_Private"
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
  hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
  hypre_CSRMatrixData(hdiag)        = diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
    hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
    hypre_CSRMatrixData(hoffd)        = offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAIJRestoreParCSR_Private" 621 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 622 { 623 hypre_CSRMatrix *hdiag,*hoffd; 624 625 PetscFunctionBegin; 626 hdiag = hypre_ParCSRMatrixDiag(*hA); 627 hoffd = hypre_ParCSRMatrixOffd(*hA); 628 /* set pointers to NULL before destroying tA */ 629 hypre_CSRMatrixI(hdiag) = NULL; 630 hypre_CSRMatrixJ(hdiag) = NULL; 631 hypre_CSRMatrixData(hdiag) = NULL; 632 hypre_CSRMatrixI(hoffd) = NULL; 633 hypre_CSRMatrixJ(hoffd) = NULL; 634 hypre_CSRMatrixData(hoffd) = NULL; 635 hypre_ParCSRMatrixColMapOffd(*hA) = NULL; 636 hypre_ParCSRMatrixDestroy(*hA); 637 *hA = NULL; 638 PetscFunctionReturn(0); 639 } 640 641 /* calls RAP from BoomerAMG: 642 the resulting ParCSR will not own the column and row starts 643 It looks like we don't need to have the diagonal entries 644 ordered first in the rows of the diagonal part 645 for boomerAMGBuildCoarseOperator to work */ 646 #undef __FUNCT__ 647 #define __FUNCT__ "MatHYPRE_ParCSR_RAP" 648 static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, 649 hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP) 650 { 651 PetscErrorCode ierr; 652 HYPRE_Int P_owns_col_starts,R_owns_row_starts; 653 654 PetscFunctionBegin; 655 P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP); 656 R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR); 657 PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP)); 658 PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP)); 659 /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */ 660 hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0); 661 hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0); 662 if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1); 663 if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1); 664 PetscFunctionReturn(0); 665 } 666 667 #undef __FUNCT__ 668 #define __FUNCT__ "MatPtAPNumeric_AIJ_AIJ_wHYPRE" 669 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C) 670 { 671 Mat B; 672 hypre_ParCSRMatrix *hA,*hP,*hPtAP; 673 PetscErrorCode ierr; 674 675 PetscFunctionBegin; 676 ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 677 ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr); 678 ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr); 679 ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr); 680 ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr); 681 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 682 ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr); 683 PetscFunctionReturn(0); 684 } 685 686 #undef __FUNCT__ 687 #define __FUNCT__ "MatPtAPSymbolic_AIJ_AIJ_wHYPRE" 688 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C) 689 { 690 PetscErrorCode ierr; 691 PetscFunctionBegin; 692 ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr); 693 ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr); 694 (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE; 695 PetscFunctionReturn(0); 696 } 697 698 #undef __FUNCT__ 699 #define __FUNCT__ "MatPtAPNumeric_AIJ_HYPRE" 700 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C) 701 { 702 Mat B; 703 Mat_HYPRE *hP; 704 hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr; 705 HYPRE_Int type; 706 MPI_Comm comm = PetscObjectComm((PetscObject)A); 707 PetscBool ishypre; 708 PetscErrorCode ierr; 709 710 PetscFunctionBegin; 711 ierr = 
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAP_AIJ_HYPRE"
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

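/*
   Illustrative usage of the PtAP support above (a minimal sketch, not part of this
   file): with A a MATAIJ matrix and P a MATHYPRE matrix, MatPtAP() reaches
   MatPtAP_AIJ_HYPRE() and the output type of C can be selected at run time with the
   registered option -matptap_hypre_outtype. Error checking omitted; A and P are
   placeholders.

     Mat C;
     MatPtAP(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);  // C defaults to MATAIJ
     // run with -matptap_hypre_outtype hypre to obtain C as a MATHYPRE matrix
*/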
#undef __FUNCT__
#define __FUNCT__ "MatPtAPNumeric_HYPRE_HYPRE"
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatPtAP_HYPRE_HYPRE"
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatmul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_ParCSR_MatMatMult"
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultNumeric_AIJ_AIJ_wHYPRE"
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultSymbolic_AIJ_AIJ_wHYPRE"
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);

  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultNumeric_HYPRE_HYPRE"
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMult_HYPRE_HYPRE"
static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
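/*
   Illustrative usage of the MatMatMult support above (a minimal sketch, not part of this
   file): with A and B both of type MATHYPRE, MatMatMult() goes through hypre_ParMatmul
   and returns C as a MATHYPRE matrix. Error checking omitted; A and B are placeholders.

     Mat C;
     MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);
     MatDestroy(&C);
*/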

#undef __FUNCT__
#define __FUNCT__ "MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE"
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE"
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_HYPRE"
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_HYPRE"
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_MultKernel_Private"
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,&ay);CHKERRQ(ierr);
  if (trans) {
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_HYPRE"
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetUp_HYPRE"
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_HYPRE"
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#define MATHYPRE_SCRATCH 2048

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_HYPRE"
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar*)v;
  PetscScalar    sscr[MATHYPRE_SCRATCH];
  HYPRE_Int      cscr[2][MATHYPRE_SCRATCH];
  HYPRE_Int      i,nzc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) PetscFunctionReturn(0);

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */
#if defined(PETSC_USE_DEBUG)
    /* INSERT_VALUES cannot be used to insert off-process entries */
    PetscInt rst,ren;
    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++)
      if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. Use ADD_VALUES instead");
#endif
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt j;
        for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
        PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
      }
      vals += nc;
    }
  }
  PetscFunctionReturn(0);
}

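/*
   Illustrative usage of MatSetValues() on a MATHYPRE matrix (a minimal sketch, not part
   of this file): entries destined for rows owned by other processes must be contributed
   with ADD_VALUES; INSERT_VALUES is only supported for locally owned rows. Error
   checking omitted; row, col and val are placeholders.

     MatSetValues(A,1,&row,1,&col,&val,ADD_VALUES);   // row may be off-process
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
*/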
#undef __FUNCT__
#define __FUNCT__ "MatHYPRESetPreallocation_HYPRE"
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs   = A->rmap->rstart;
  re   = A->rmap->rend;
  cs   = A->cmap->rstart;
  ce   = A->cmap->rend;
  if (!hA->ij) {
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    HYPRE_Int hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re-rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce-cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));

  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1) {
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else {
      honnz = (HYPRE_Int*)onnz;
    }
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
  } else {
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }
  A->preallocated = PETSC_TRUE;

  /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
  {
    hypre_AuxParCSRMatrix *aux_matrix;
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  }
  PetscFunctionReturn(0);
}

/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A    - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e. 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e. 'm'.

   Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
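/*
   Illustrative usage of MatHYPRESetPreallocation() (a minimal sketch, not part of this
   file), preallocating 5 nonzeros per row in the diagonal block and 2 in the
   off-diagonal block of a MATHYPRE matrix; error checking omitted and m, n, M, N are
   placeholders.

     Mat A;
     MatCreate(PETSC_COMM_WORLD,&A);
     MatSetSizes(A,m,n,M,N);
     MatSetType(A,MATHYPRE);
     MatHYPRESetPreallocation(A,5,NULL,2,NULL);
     // ... MatSetValues(A,...), then MatAssemblyBegin/End(A,MAT_FINAL_ASSEMBLY)
*/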
#undef __FUNCT__
#define __FUNCT__ "MatHYPRESetPreallocation"
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  vparcsr  - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options

   Output Parameter:
.  A  - the matrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
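/*
   Illustrative usage of MatCreateFromParCSR() (a minimal sketch, not part of this file):
   wrapping an existing hypre_ParCSRMatrix as a MATHYPRE matrix without taking ownership
   of it; note that PETSC_COPY_VALUES is not supported for MATHYPRE. Error checking
   omitted; parcsr is a placeholder for a matrix produced elsewhere, e.g. by hypre.

     hypre_ParCSRMatrix *parcsr = ...;
     Mat                B;
     MatCreateFromParCSR(parcsr,MATHYPRE,PETSC_USE_POINTER,&B);
*/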
#undef __FUNCT__
#define __FUNCT__ "MatCreateFromParCSR"
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                T;
  Mat_HYPRE          *hA;
  hypre_ParCSRMatrix *parcsr;
  MPI_Comm           comm;
  PetscInt           rstart,rend,cstart,cend,M,N;
  PetscBool          isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  parcsr = (hypre_ParCSRMatrix*)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  ierr   = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr   = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatHYPREGetParCSR_HYPRE"
PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      type;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}

/*
   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

   Not collective

   Input Parameter:
.  A - the MATHYPRE object

   Output Parameter:
.  parcsr - the pointer to the hypre_ParCSRMatrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
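/*
   Illustrative usage of MatHYPREGetParCSR() (a minimal sketch, not part of this file):
   accessing the underlying hypre_ParCSRMatrix of a MATHYPRE matrix A in order to call a
   hypre routine on it directly. Error checking omitted.

     hypre_ParCSRMatrix *parcsr;
     MatHYPREGetParCSR(A,&parcsr);
     // parcsr points at the matrix owned by A; do not destroy it
*/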
#undef __FUNCT__
#define __FUNCT__ "MatHYPREGetParCSR"
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreate_HYPRE"
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;

  B->data      = (void*)hB;
  B->rmap->bs  = 1;
  B->assembled = PETSC_FALSE;

  B->ops->mult          = MatMult_HYPRE;
  B->ops->multtranspose = MatMultTranspose_HYPRE;
  B->ops->setup         = MatSetUp_HYPRE;
  B->ops->destroy       = MatDestroy_HYPRE;
  B->ops->assemblyend   = MatAssemblyEnd_HYPRE;
  B->ops->ptap          = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult       = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues     = MatSetValues_HYPRE;

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "hypre_array_destroy"
static PetscErrorCode hypre_array_destroy(void *ptr)
{
  PetscFunctionBegin;
  hypre_TFree(ptr);
  PetscFunctionReturn(0);
}

#if 0
/*
    Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format

    This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first,
    which will corrupt the PETSc data structure if we did this. Need a work around to this problem.
*/
#include <_hypre_IJ_mv.h>
#include <HYPRE_IJ_mv.h>

#undef __FUNCT__
#define __FUNCT__ "MatHYPRE_IJMatrixLink"
PetscErrorCode MatHYPRE_IJMatrixLink(Mat A, HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(ij,2);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  ierr = MatSetUp(A);CHKERRQ(ierr);

  ierr   = PetscLogEventBegin(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre data structures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  ierr = PetscLogEventEnd(MAT_Convert,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif