1 2 /* 3 Creates hypre ijmatrix from PETSc matrix 4 */ 5 6 #include <petscpkg_version.h> 7 #include <petsc/private/petschypre.h> 8 #include <petscmathypre.h> 9 #include <petsc/private/matimpl.h> 10 #include <../src/mat/impls/hypre/mhypre.h> 11 #include <../src/mat/impls/aij/mpi/mpiaij.h> 12 #include <../src/vec/vec/impls/hypre/vhyp.h> 13 #include <HYPRE.h> 14 #include <HYPRE_utilities.h> 15 #include <_hypre_parcsr_ls.h> 16 #include <_hypre_sstruct_ls.h> 17 18 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0) 19 #define hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A) 20 #endif 21 22 PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*); 23 24 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*); 25 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix); 26 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix); 27 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix); 28 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool); 29 static PetscErrorCode hypre_array_destroy(void*); 30 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins); 31 32 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij) 33 { 34 PetscErrorCode ierr; 35 PetscInt i,n_d,n_o; 36 const PetscInt *ia_d,*ia_o; 37 PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE; 38 HYPRE_Int *nnz_d=NULL,*nnz_o=NULL; 39 40 PetscFunctionBegin; 41 if (A_d) { /* determine number of nonzero entries in local diagonal part */ 42 ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr); 43 if (done_d) { 44 ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr); 45 for (i=0; i<n_d; i++) { 46 nnz_d[i] = ia_d[i+1] - ia_d[i]; 47 } 48 } 49 ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr); 50 } 51 if (A_o) { /* determine 
number of nonzero entries in local off-diagonal part */ 52 ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 53 if (done_o) { 54 ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr); 55 for (i=0; i<n_o; i++) { 56 nnz_o[i] = ia_o[i+1] - ia_o[i]; 57 } 58 } 59 ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 60 } 61 if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */ 62 if (!done_o) { /* only diagonal part */ 63 ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr); 64 for (i=0; i<n_d; i++) { 65 nnz_o[i] = 0; 66 } 67 } 68 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 69 { /* If we don't do this, the columns of the matrix will be all zeros! */ 70 hypre_AuxParCSRMatrix *aux_matrix; 71 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 72 hypre_AuxParCSRMatrixDestroy(aux_matrix); 73 hypre_IJMatrixTranslator(ij) = NULL; 74 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 75 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 76 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 77 } 78 #else 79 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 80 #endif 81 ierr = PetscFree(nnz_d);CHKERRQ(ierr); 82 ierr = PetscFree(nnz_o);CHKERRQ(ierr); 83 } 84 PetscFunctionReturn(0); 85 } 86 87 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA) 88 { 89 PetscErrorCode ierr; 90 PetscInt rstart,rend,cstart,cend; 91 92 PetscFunctionBegin; 93 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 94 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 95 rstart = A->rmap->rstart; 96 rend = A->rmap->rend; 97 cstart = A->cmap->rstart; 98 cend = A->cmap->rend; 99 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 100 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 101 { 102 PetscBool same; 103 Mat A_d,A_o; 104 const PetscInt *colmap; 105 ierr = 
PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr); 106 if (same) { 107 ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 108 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 109 PetscFunctionReturn(0); 110 } 111 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr); 112 if (same) { 113 ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 114 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 115 PetscFunctionReturn(0); 116 } 117 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr); 118 if (same) { 119 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 120 PetscFunctionReturn(0); 121 } 122 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr); 123 if (same) { 124 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 125 PetscFunctionReturn(0); 126 } 127 } 128 PetscFunctionReturn(0); 129 } 130 131 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij) 132 { 133 PetscErrorCode ierr; 134 PetscInt i,rstart,rend,ncols,nr,nc; 135 const PetscScalar *values; 136 const PetscInt *cols; 137 PetscBool flg; 138 139 PetscFunctionBegin; 140 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 141 ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr); 142 if (flg && nr == nc) { 143 ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr); 144 PetscFunctionReturn(0); 145 } 146 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 147 if (flg) { 148 ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr); 149 PetscFunctionReturn(0); 150 } 151 152 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 153 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 154 for (i=rstart; i<rend; i++) { 155 ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 156 if (ncols) { 157 HYPRE_Int nc = (HYPRE_Int)ncols; 158 159 if ((PetscInt)nc 
!= ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i); 160 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values)); 161 } 162 ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 163 } 164 PetscFunctionReturn(0); 165 } 166 167 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij) 168 { 169 PetscErrorCode ierr; 170 Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data; 171 HYPRE_Int type; 172 hypre_ParCSRMatrix *par_matrix; 173 hypre_AuxParCSRMatrix *aux_matrix; 174 hypre_CSRMatrix *hdiag; 175 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 176 177 PetscFunctionBegin; 178 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 179 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 180 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 181 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 182 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 183 /* 184 this is the Hack part where we monkey directly with the hypre datastructures 185 */ 186 if (sameint) { 187 ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr); 188 ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr); 189 } else { 190 PetscInt i; 191 192 for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i]; 193 for (i=0;i<pdiag->nz;i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i]; 194 } 195 ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr); 196 197 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 198 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 199 PetscFunctionReturn(0); 200 } 201 202 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij) 203 { 204 PetscErrorCode ierr; 205 Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data; 206 Mat_SeqAIJ *pdiag,*poffd; 207 PetscInt i,*garray = 
pA->garray,*jj,cstart,*pjj; 208 HYPRE_Int *hjj,type; 209 hypre_ParCSRMatrix *par_matrix; 210 hypre_AuxParCSRMatrix *aux_matrix; 211 hypre_CSRMatrix *hdiag,*hoffd; 212 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 213 214 PetscFunctionBegin; 215 pdiag = (Mat_SeqAIJ*) pA->A->data; 216 poffd = (Mat_SeqAIJ*) pA->B->data; 217 /* cstart is only valid for square MPIAIJ layed out in the usual way */ 218 ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr); 219 220 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 221 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 222 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 223 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 224 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 225 hoffd = hypre_ParCSRMatrixOffd(par_matrix); 226 227 /* 228 this is the Hack part where we monkey directly with the hypre datastructures 229 */ 230 if (sameint) { 231 ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr); 232 } else { 233 for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]); 234 } 235 /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */ 236 hjj = hdiag->j; 237 pjj = pdiag->j; 238 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 239 for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i]; 240 #else 241 for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i]; 242 #endif 243 ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr); 244 if (sameint) { 245 ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr); 246 } else { 247 for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]); 248 } 249 250 /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this 251 If we hacked a hypre a bit more we might be able to avoid this step */ 252 #if 
PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 253 PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd)); 254 jj = (PetscInt*) hoffd->big_j; 255 #else 256 jj = (PetscInt*) hoffd->j; 257 #endif 258 pjj = poffd->j; 259 for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; 260 261 ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr); 262 263 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 264 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 265 PetscFunctionReturn(0); 266 } 267 268 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B) 269 { 270 Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data); 271 Mat lA; 272 ISLocalToGlobalMapping rl2g,cl2g; 273 IS is; 274 hypre_ParCSRMatrix *hA; 275 hypre_CSRMatrix *hdiag,*hoffd; 276 MPI_Comm comm; 277 HYPRE_Complex *hdd,*hod,*aa; 278 PetscScalar *data; 279 HYPRE_BigInt *col_map_offd; 280 HYPRE_Int *hdi,*hdj,*hoi,*hoj; 281 PetscInt *ii,*jj,*iptr,*jptr; 282 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N; 283 HYPRE_Int type; 284 PetscErrorCode ierr; 285 286 PetscFunctionBegin; 287 comm = PetscObjectComm((PetscObject)A); 288 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type)); 289 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 290 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA)); 291 M = hypre_ParCSRMatrixGlobalNumRows(hA); 292 N = hypre_ParCSRMatrixGlobalNumCols(hA); 293 str = hypre_ParCSRMatrixFirstRowIndex(hA); 294 stc = hypre_ParCSRMatrixFirstColDiag(hA); 295 hdiag = hypre_ParCSRMatrixDiag(hA); 296 hoffd = hypre_ParCSRMatrixOffd(hA); 297 dr = hypre_CSRMatrixNumRows(hdiag); 298 dc = hypre_CSRMatrixNumCols(hdiag); 299 nnz = hypre_CSRMatrixNumNonzeros(hdiag); 300 hdi = hypre_CSRMatrixI(hdiag); 301 hdj = hypre_CSRMatrixJ(hdiag); 302 hdd = hypre_CSRMatrixData(hdiag); 303 oc = hypre_CSRMatrixNumCols(hoffd); 304 nnz += hypre_CSRMatrixNumNonzeros(hoffd); 305 hoi = hypre_CSRMatrixI(hoffd); 306 hoj = 
hypre_CSRMatrixJ(hoffd); 307 hod = hypre_CSRMatrixData(hoffd); 308 if (reuse != MAT_REUSE_MATRIX) { 309 PetscInt *aux; 310 311 /* generate l2g maps for rows and cols */ 312 ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr); 313 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 314 ierr = ISDestroy(&is);CHKERRQ(ierr); 315 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA); 316 ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr); 317 for (i=0; i<dc; i++) aux[i] = i+stc; 318 for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i]; 319 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 320 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 321 ierr = ISDestroy(&is);CHKERRQ(ierr); 322 /* create MATIS object */ 323 ierr = MatCreate(comm,B);CHKERRQ(ierr); 324 ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr); 325 ierr = MatSetType(*B,MATIS);CHKERRQ(ierr); 326 ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr); 327 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 328 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 329 330 /* allocate CSR for local matrix */ 331 ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr); 332 ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr); 333 ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr); 334 } else { 335 PetscInt nr; 336 PetscBool done; 337 ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr); 338 ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr); 339 if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr); 340 if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! 
reuse %D requested %D",iptr[nr],nnz); 341 ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr); 342 } 343 /* merge local matrices */ 344 ii = iptr; 345 jj = jptr; 346 aa = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 347 *ii = *(hdi++) + *(hoi++); 348 for (jd=0,jo=0,cum=0; *ii<nnz; cum++) { 349 PetscScalar *aold = (PetscScalar*)aa; 350 PetscInt *jold = jj,nc = jd+jo; 351 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; } 352 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; } 353 *(++ii) = *(hdi++) + *(hoi++); 354 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr); 355 } 356 for (; cum<dr; cum++) *(++ii) = nnz; 357 if (reuse != MAT_REUSE_MATRIX) { 358 Mat_SeqAIJ* a; 359 360 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr); 361 ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr); 362 /* hack SeqAIJ */ 363 a = (Mat_SeqAIJ*)(lA->data); 364 a->free_a = PETSC_TRUE; 365 a->free_ij = PETSC_TRUE; 366 ierr = MatDestroy(&lA);CHKERRQ(ierr); 367 } 368 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 369 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 370 if (reuse == MAT_INPLACE_MATRIX) { 371 ierr = MatHeaderReplace(A,B);CHKERRQ(ierr); 372 } 373 PetscFunctionReturn(0); 374 } 375 376 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B) 377 { 378 Mat M = NULL; 379 Mat_HYPRE *hB; 380 MPI_Comm comm = PetscObjectComm((PetscObject)A); 381 PetscErrorCode ierr; 382 383 PetscFunctionBegin; 384 if (reuse == MAT_REUSE_MATRIX) { 385 /* always destroy the old matrix and create a new memory; 386 hope this does not churn the memory too much. The problem 387 is I do not know if it is possible to put the matrix back to 388 its initial state so that we can directly copy the values 389 the second time through. 
*/ 390 hB = (Mat_HYPRE*)((*B)->data); 391 PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij)); 392 } else { 393 ierr = MatCreate(comm,&M);CHKERRQ(ierr); 394 ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr); 395 ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 396 hB = (Mat_HYPRE*)(M->data); 397 if (reuse == MAT_INITIAL_MATRIX) *B = M; 398 } 399 ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr); 400 ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr); 401 if (reuse == MAT_INPLACE_MATRIX) { 402 ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr); 403 } 404 (*B)->preallocated = PETSC_TRUE; 405 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 406 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 407 PetscFunctionReturn(0); 408 } 409 410 static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B) 411 { 412 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 413 hypre_ParCSRMatrix *parcsr; 414 hypre_CSRMatrix *hdiag,*hoffd; 415 MPI_Comm comm; 416 PetscScalar *da,*oa,*aptr; 417 PetscInt *dii,*djj,*oii,*ojj,*iptr; 418 PetscInt i,dnnz,onnz,m,n; 419 HYPRE_Int type; 420 PetscMPIInt size; 421 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 422 PetscErrorCode ierr; 423 424 PetscFunctionBegin; 425 comm = PetscObjectComm((PetscObject)A); 426 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 427 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 428 if (reuse == MAT_REUSE_MATRIX) { 429 PetscBool ismpiaij,isseqaij; 430 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 431 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 432 if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported"); 433 } 434 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 435 436 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr)); 437 hdiag = 
hypre_ParCSRMatrixDiag(parcsr); 438 hoffd = hypre_ParCSRMatrixOffd(parcsr); 439 m = hypre_CSRMatrixNumRows(hdiag); 440 n = hypre_CSRMatrixNumCols(hdiag); 441 dnnz = hypre_CSRMatrixNumNonzeros(hdiag); 442 onnz = hypre_CSRMatrixNumNonzeros(hoffd); 443 if (reuse == MAT_INITIAL_MATRIX) { 444 ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr); 445 ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr); 446 ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr); 447 } else if (reuse == MAT_REUSE_MATRIX) { 448 PetscInt nr; 449 PetscBool done; 450 if (size > 1) { 451 Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data); 452 453 ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr); 454 if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m); 455 if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz); 456 ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr); 457 } else { 458 ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr); 459 if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m); 460 if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! 
reuse %D hypre %D",dii[nr],dnnz); 461 ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr); 462 } 463 } else { /* MAT_INPLACE_MATRIX */ 464 if (!sameint) { 465 ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr); 466 ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr); 467 } else { 468 dii = (PetscInt*)hypre_CSRMatrixI(hdiag); 469 djj = (PetscInt*)hypre_CSRMatrixJ(hdiag); 470 } 471 da = (PetscScalar*)hypre_CSRMatrixData(hdiag); 472 } 473 474 if (!sameint) { 475 for (i=0;i<m+1;i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]); 476 for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]); 477 } else { 478 ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr); 479 ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr); 480 } 481 ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr); 482 iptr = djj; 483 aptr = da; 484 for (i=0; i<m; i++) { 485 PetscInt nc = dii[i+1]-dii[i]; 486 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr); 487 iptr += nc; 488 aptr += nc; 489 } 490 if (size > 1) { 491 HYPRE_BigInt *coffd; 492 HYPRE_Int *offdj; 493 494 if (reuse == MAT_INITIAL_MATRIX) { 495 ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr); 496 ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr); 497 ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr); 498 } else if (reuse == MAT_REUSE_MATRIX) { 499 Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data); 500 PetscInt nr,hr = hypre_CSRMatrixNumRows(hoffd); 501 PetscBool done; 502 503 ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr); 504 if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr); 505 if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! 
reuse %D hypre %D",oii[nr],onnz); 506 ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr); 507 } else { /* MAT_INPLACE_MATRIX */ 508 if (!sameint) { 509 ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr); 510 ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr); 511 } else { 512 oii = (PetscInt*)hypre_CSRMatrixI(hoffd); 513 ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd); 514 } 515 oa = (PetscScalar*)hypre_CSRMatrixData(hoffd); 516 } 517 if (!sameint) { 518 for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]); 519 } else { 520 ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr); 521 } 522 offdj = hypre_CSRMatrixJ(hoffd); 523 coffd = hypre_ParCSRMatrixColMapOffd(parcsr); 524 for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]]; 525 ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr); 526 iptr = ojj; 527 aptr = oa; 528 for (i=0; i<m; i++) { 529 PetscInt nc = oii[i+1]-oii[i]; 530 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr); 531 iptr += nc; 532 aptr += nc; 533 } 534 if (reuse == MAT_INITIAL_MATRIX) { 535 Mat_MPIAIJ *b; 536 Mat_SeqAIJ *d,*o; 537 538 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr); 539 /* hack MPIAIJ */ 540 b = (Mat_MPIAIJ*)((*B)->data); 541 d = (Mat_SeqAIJ*)b->A->data; 542 o = (Mat_SeqAIJ*)b->B->data; 543 d->free_a = PETSC_TRUE; 544 d->free_ij = PETSC_TRUE; 545 o->free_a = PETSC_TRUE; 546 o->free_ij = PETSC_TRUE; 547 } else if (reuse == MAT_INPLACE_MATRIX) { 548 Mat T; 549 550 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr); 551 if (sameint) { /* ownership of CSR pointers is transferred to PETSc */ 552 hypre_CSRMatrixI(hdiag) = NULL; 553 hypre_CSRMatrixJ(hdiag) = NULL; 554 hypre_CSRMatrixI(hoffd) = NULL; 555 hypre_CSRMatrixJ(hoffd) = NULL; 556 } else { /* Hack MPIAIJ -> free ij but not a */ 557 Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data); 558 Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data); 559 
Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data); 560 561 d->free_ij = PETSC_TRUE; 562 o->free_ij = PETSC_TRUE; 563 } 564 hypre_CSRMatrixData(hdiag) = NULL; 565 hypre_CSRMatrixData(hoffd) = NULL; 566 ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr); 567 } 568 } else { 569 oii = NULL; 570 ojj = NULL; 571 oa = NULL; 572 if (reuse == MAT_INITIAL_MATRIX) { 573 Mat_SeqAIJ* b; 574 575 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr); 576 /* hack SeqAIJ */ 577 b = (Mat_SeqAIJ*)((*B)->data); 578 b->free_a = PETSC_TRUE; 579 b->free_ij = PETSC_TRUE; 580 } else if (reuse == MAT_INPLACE_MATRIX) { 581 Mat T; 582 583 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr); 584 if (sameint) { /* ownership of CSR pointers is transferred to PETSc */ 585 hypre_CSRMatrixI(hdiag) = NULL; 586 hypre_CSRMatrixJ(hdiag) = NULL; 587 } else { /* free ij but not a */ 588 Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data); 589 590 b->free_ij = PETSC_TRUE; 591 } 592 hypre_CSRMatrixData(hdiag) = NULL; 593 ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr); 594 } 595 } 596 597 /* we have to use hypre_Tfree to free the HYPRE arrays 598 that PETSc now onws */ 599 if (reuse == MAT_INPLACE_MATRIX) { 600 PetscInt nh; 601 void *ptrs[6] = {da,oa,dii,djj,oii,ojj}; 602 const char *names[6] = {"_hypre_csr_da", 603 "_hypre_csr_oa", 604 "_hypre_csr_dii", 605 "_hypre_csr_djj", 606 "_hypre_csr_oii", 607 "_hypre_csr_ojj"}; 608 nh = sameint ? 
6 : 2; 609 for (i=0; i<nh; i++) { 610 PetscContainer c; 611 612 ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr); 613 ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr); 614 ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr); 615 ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr); 616 ierr = PetscContainerDestroy(&c);CHKERRQ(ierr); 617 } 618 } 619 PetscFunctionReturn(0); 620 } 621 622 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 623 { 624 hypre_ParCSRMatrix *tA; 625 hypre_CSRMatrix *hdiag,*hoffd; 626 Mat_SeqAIJ *diag,*offd; 627 PetscInt *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts; 628 MPI_Comm comm = PetscObjectComm((PetscObject)A); 629 PetscBool ismpiaij,isseqaij; 630 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 631 PetscErrorCode ierr; 632 633 PetscFunctionBegin; 634 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 635 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 636 if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name); 637 if (ismpiaij) { 638 Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data); 639 640 diag = (Mat_SeqAIJ*)a->A->data; 641 offd = (Mat_SeqAIJ*)a->B->data; 642 garray = a->garray; 643 noffd = a->B->cmap->N; 644 dnnz = diag->nz; 645 onnz = offd->nz; 646 } else { 647 diag = (Mat_SeqAIJ*)A->data; 648 offd = NULL; 649 garray = NULL; 650 noffd = 0; 651 dnnz = diag->nz; 652 onnz = 0; 653 } 654 655 /* create a temporary ParCSR */ 656 if (HYPRE_AssumedPartitionCheck()) { 657 PetscMPIInt myid; 658 659 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 660 row_starts = A->rmap->range + myid; 661 col_starts = A->cmap->range + myid; 662 } else { 663 row_starts = A->rmap->range; 664 col_starts = A->cmap->range; 665 } 666 tA = 
hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz); 667 hypre_ParCSRMatrixSetRowStartsOwner(tA,0); 668 hypre_ParCSRMatrixSetColStartsOwner(tA,0); 669 670 /* set diagonal part */ 671 hdiag = hypre_ParCSRMatrixDiag(tA); 672 if (sameint) { /* reuse CSR pointers */ 673 hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i; 674 hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j; 675 } else { /* malloc CSR pointers */ 676 HYPRE_Int *hi,*hj; 677 678 ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr); 679 for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]); 680 for (i = 0; i < dnnz; i++) hj[i] = (HYPRE_Int)(diag->j[i]); 681 hypre_CSRMatrixI(hdiag) = hi; 682 hypre_CSRMatrixJ(hdiag) = hj; 683 } 684 hypre_CSRMatrixData(hdiag) = (HYPRE_Complex*)diag->a; 685 hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz; 686 hypre_CSRMatrixSetRownnz(hdiag); 687 hypre_CSRMatrixSetDataOwner(hdiag,0); 688 689 /* set offdiagonal part */ 690 hoffd = hypre_ParCSRMatrixOffd(tA); 691 if (offd) { 692 if (sameint) { /* reuse CSR pointers */ 693 hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i; 694 hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j; 695 } else { /* malloc CSR pointers */ 696 HYPRE_Int *hi,*hj; 697 698 ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr); 699 for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]); 700 for (i = 0; i < onnz; i++) hj[i] = (HYPRE_Int)(offd->j[i]); 701 hypre_CSRMatrixI(hoffd) = hi; 702 hypre_CSRMatrixJ(hoffd) = hj; 703 } 704 hypre_CSRMatrixData(hoffd) = (HYPRE_Complex*)offd->a; 705 hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz; 706 hypre_CSRMatrixSetRownnz(hoffd); 707 hypre_CSRMatrixSetDataOwner(hoffd,0); 708 hypre_ParCSRMatrixSetNumNonzeros(tA); 709 hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray; 710 } 711 *hA = tA; 712 PetscFunctionReturn(0); 713 } 714 715 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 716 { 717 
hypre_CSRMatrix *hdiag,*hoffd; 718 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 719 PetscErrorCode ierr; 720 721 PetscFunctionBegin; 722 hdiag = hypre_ParCSRMatrixDiag(*hA); 723 hoffd = hypre_ParCSRMatrixOffd(*hA); 724 /* free temporary memory allocated by PETSc */ 725 if (!sameint) { 726 HYPRE_Int *hi,*hj; 727 728 hi = hypre_CSRMatrixI(hdiag); 729 hj = hypre_CSRMatrixJ(hdiag); 730 ierr = PetscFree2(hi,hj);CHKERRQ(ierr); 731 if (hoffd) { 732 hi = hypre_CSRMatrixI(hoffd); 733 hj = hypre_CSRMatrixJ(hoffd); 734 ierr = PetscFree2(hi,hj);CHKERRQ(ierr); 735 } 736 } 737 /* set pointers to NULL before destroying tA */ 738 hypre_CSRMatrixI(hdiag) = NULL; 739 hypre_CSRMatrixJ(hdiag) = NULL; 740 hypre_CSRMatrixData(hdiag) = NULL; 741 hypre_CSRMatrixI(hoffd) = NULL; 742 hypre_CSRMatrixJ(hoffd) = NULL; 743 hypre_CSRMatrixData(hoffd) = NULL; 744 hypre_ParCSRMatrixColMapOffd(*hA) = NULL; 745 hypre_ParCSRMatrixDestroy(*hA); 746 *hA = NULL; 747 PetscFunctionReturn(0); 748 } 749 750 /* calls RAP from BoomerAMG: 751 the resulting ParCSR will not own the column and row starts 752 It looks like we don't need to have the diagonal entries 753 ordered first in the rows of the diagonal part 754 for boomerAMGBuildCoarseOperator to work */ 755 static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP) 756 { 757 HYPRE_Int P_owns_col_starts,R_owns_row_starts; 758 759 PetscFunctionBegin; 760 P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP); 761 R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR); 762 PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP)); 763 PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP)); 764 /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */ 765 hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0); 766 hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0); 767 if (P_owns_col_starts) 
hypre_ParCSRMatrixSetColStartsOwner(hP,1); 768 if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1); 769 PetscFunctionReturn(0); 770 } 771 772 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C) 773 { 774 Mat B; 775 hypre_ParCSRMatrix *hA,*hP,*hPtAP; 776 PetscErrorCode ierr; 777 778 PetscFunctionBegin; 779 ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 780 ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr); 781 ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr); 782 ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr); 783 ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr); 784 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 785 ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr); 786 PetscFunctionReturn(0); 787 } 788 789 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C) 790 { 791 PetscErrorCode ierr; 792 793 PetscFunctionBegin; 794 ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr); 795 ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr); 796 (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE; 797 PetscFunctionReturn(0); 798 } 799 800 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C) 801 { 802 Mat B; 803 Mat_HYPRE *hP; 804 hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr; 805 HYPRE_Int type; 806 MPI_Comm comm = PetscObjectComm((PetscObject)A); 807 PetscBool ishypre; 808 PetscErrorCode ierr; 809 810 PetscFunctionBegin; 811 ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr); 812 if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE); 813 hP = (Mat_HYPRE*)P->data; 814 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type)); 815 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 816 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr)); 817 818 ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 819 ierr = 
MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver for A AIJ, P MATHYPRE: on MAT_INITIAL_MATRIX the output type
   can be selected at runtime via -matptap_hypre_outtype (default MATAIJ) */
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Numeric phase of C = P^T * A * P with both A and P of type MATHYPRE */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver for the MATHYPRE/MATHYPRE combination; output is always MATHYPRE */
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have
the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* hypre_ParMatmul returns the product; push/pop so hypre shows up in PETSc stack traces */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* Numeric phase of C = A * B for AIJ matrices, computed through hypre_ParMatmul */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase: creates the AIJ container and installs the numeric callback (fill unused) */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

/* Numeric phase of C = A * B with both operands of type MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  /* HeaderReplace wiped the ops table; reinstall the numeric callback for reuse */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  PetscFunctionReturn(0);
}

/* MatMatMult driver for the MATHYPRE/MATHYPRE combination */
static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Numeric phase of D = A^T * B * C for AIJ matrices, via hypre's RAP kernel */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase: only creates the AIJ container (fill unused; no numeric callback
   is installed here, unlike the PtAP/MatMult symbolic routines above) */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr);
  ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A^T * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = y + A * x */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = y + A^T * x */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans
   The work vectors hA->x/hA->b are made to alias the PETSc vector arrays via
   VecHYPRE_ParVectorReplacePointer; the saved pointers (sax/say) are swapped
   back immediately after the matvec, so the swap pairs must stay balanced */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  if (trans) {
    /* for the transpose product the roles of the two work vectors are exchanged */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Frees the hypre objects, the stash, the scratch array and the composed methods */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b)
PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if the inner ParCSR is owned elsewhere, detach it so HYPRE_IJMatrixDestroy does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove the composed conversion/setup methods registered at creation */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Default setup: preallocate with default row lengths */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Final assembly: drains the stash of off-process entries, calls HYPRE_IJMatrixAssemble,
   recreates hypre's auxiliary matrix, and lazily creates the matvec work vectors */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscScalar    *val;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitalize */
  {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  /* work vectors for MatMult already exist -> nothing left to do */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  /* array-less vectors: only used as templates to size the hypre IJ vectors */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Hands out the single reusable scratch buffer (grown on demand, size in bytes) */
static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

  if (hA->size >= size) {
    *array = hA->array;
  } else {
    ierr     = PetscFree(hA->array);CHKERRQ(ierr);
    hA->size = size;
    ierr     = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
    *array   = hA->array;
  }

  hA->available = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* Marks the scratch buffer as available again; the buffer itself is kept for reuse */
static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  *array        = NULL;
  hA->available = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* Inserts/adds a logically dense block of values; negative row/column indices are
   ignored, off-process rows are stashed (INSERT_VALUES only) for MatAssemblyEnd */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA   = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  HYPRE_Complex  *sscr;
  PetscInt       *cscr[2];
  PetscInt       i,nzc;
  void           *array = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scratch layout: nc compressed columns, nc original positions, nc*nr casted values */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr    = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  /* compress out negative column indices, remembering the original positions */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if
((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]); 1262 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1263 PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1264 } 1265 vals += nc; 1266 } 1267 } else { /* INSERT_VALUES */ 1268 PetscInt rst,ren; 1269 1270 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1271 for (i=0;i<nr;i++) { 1272 if (rows[i] >= 0 && nzc) { 1273 PetscInt j; 1274 HYPRE_Int hnc = (HYPRE_Int)nzc; 1275 1276 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]); 1277 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1278 /* nonlocal values */ 1279 if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); } 1280 /* local values */ 1281 else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1282 } 1283 vals += nc; 1284 } 1285 } 1286 1287 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1288 PetscFunctionReturn(0); 1289 } 1290 1291 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1292 { 1293 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1294 HYPRE_Int *hdnnz,*honnz; 1295 PetscInt i,rs,re,cs,ce,bs; 1296 PetscMPIInt size; 1297 PetscErrorCode ierr; 1298 1299 PetscFunctionBegin; 1300 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1301 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1302 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1303 rs = A->rmap->rstart; 1304 re = A->rmap->rend; 1305 cs = A->cmap->rstart; 1306 ce = A->cmap->rend; 1307 if (!hA->ij) { 1308 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij)); 
1309 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 1310 } else { 1311 HYPRE_BigInt hrs,hre,hcs,hce; 1312 PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce)); 1313 if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re); 1314 if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce); 1315 } 1316 if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs; 1317 if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs; 1318 1319 if (!dnnz) { 1320 ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr); 1321 for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz; 1322 } else { 1323 hdnnz = (HYPRE_Int*)dnnz; 1324 } 1325 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 1326 if (size > 1) { 1327 hypre_AuxParCSRMatrix *aux_matrix; 1328 if (!onnz) { 1329 ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr); 1330 for (i=0;i<A->rmap->n;i++) honnz[i] = onz; 1331 } else { 1332 honnz = (HYPRE_Int*)onnz; 1333 } 1334 /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems 1335 they assume the user will input the entire row values, properly sorted 1336 In PETSc, we don't make such an assumption, and we instead set this flag to 1 1337 Also, to avoid possible memory leaks, we destroy and recreate the translator 1338 This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize 1339 the IJ matrix for us */ 1340 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1341 hypre_AuxParCSRMatrixDestroy(aux_matrix); 1342 hypre_IJMatrixTranslator(hA->ij) = NULL; 1343 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz)); 1344 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1345 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 1346 } else { 1347 honnz 
= NULL; 1348 PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz)); 1349 } 1350 1351 /* reset assembled flag and call the initialize method */ 1352 hypre_IJMatrixAssembleFlag(hA->ij) = 0; 1353 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1354 if (!dnnz) { 1355 ierr = PetscFree(hdnnz);CHKERRQ(ierr); 1356 } 1357 if (!onnz && honnz) { 1358 ierr = PetscFree(honnz);CHKERRQ(ierr); 1359 } 1360 1361 /* Match AIJ logic */ 1362 A->preallocated = PETSC_TRUE; 1363 A->assembled = PETSC_FALSE; 1364 PetscFunctionReturn(0); 1365 } 1366 1367 /*@C 1368 MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format 1369 1370 Collective on Mat 1371 1372 Input Parameters: 1373 + A - the matrix 1374 . dnz - number of nonzeros per row in DIAGONAL portion of local submatrix 1375 (same value is used for all local rows) 1376 . dnnz - array containing the number of nonzeros in the various rows of the 1377 DIAGONAL portion of the local submatrix (possibly different for each row) 1378 or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure. 1379 The size of this array is equal to the number of local rows, i.e 'm'. 1380 For matrices that will be factored, you must leave room for (and set) 1381 the diagonal entry even if it is zero. 1382 . onz - number of nonzeros per row in the OFF-DIAGONAL portion of local 1383 submatrix (same value is used for all local rows). 1384 - onnz - array containing the number of nonzeros in the various rows of the 1385 OFF-DIAGONAL portion of the local submatrix (possibly different for 1386 each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero 1387 structure. The size of this array is equal to the number 1388 of local rows, i.e 'm'. 1389 1390 Notes: 1391 If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored. 
Level: intermediate

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  parcsr   - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options

   Output Parameter:
.  A  - the matrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm  = hypre_ParCSRMatrixComm(parcsr);
  ierr  = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr  = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: ranges are half-open [start,end) */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: clone structure, then copy values */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER; /* the clone is ours now */
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Internal accessor: returns the hypre_ParCSRMatrix held by the IJ wrapper */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA =
(Mat_HYPRE*)A->data; 1541 HYPRE_Int type; 1542 1543 PetscFunctionBegin; 1544 if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present"); 1545 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 1546 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR"); 1547 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr)); 1548 PetscFunctionReturn(0); 1549 } 1550 1551 /* 1552 MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix 1553 1554 Not collective 1555 1556 Input Parameters: 1557 + A - the MATHYPRE object 1558 1559 Output Parameter: 1560 . parcsr - the pointer to the hypre_ParCSRMatrix 1561 1562 Level: intermediate 1563 1564 .seealso: MatHYPRE, PetscCopyMode 1565 */ 1566 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr) 1567 { 1568 PetscErrorCode ierr; 1569 1570 PetscFunctionBegin; 1571 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1572 PetscValidType(A,1); 1573 ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr); 1574 PetscFunctionReturn(0); 1575 } 1576 1577 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd) 1578 { 1579 hypre_ParCSRMatrix *parcsr; 1580 hypre_CSRMatrix *ha; 1581 PetscInt rst; 1582 PetscErrorCode ierr; 1583 1584 PetscFunctionBegin; 1585 if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks"); 1586 ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr); 1587 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1588 if (missing) *missing = PETSC_FALSE; 1589 if (dd) *dd = -1; 1590 ha = hypre_ParCSRMatrixDiag(parcsr); 1591 if (ha) { 1592 PetscInt size,i; 1593 HYPRE_Int *ii,*jj; 1594 1595 size = hypre_CSRMatrixNumRows(ha); 1596 ii = hypre_CSRMatrixI(ha); 1597 jj = hypre_CSRMatrixJ(ha); 1598 for (i = 0; i < size; i++) { 1599 PetscInt j; 1600 
PetscBool found = PETSC_FALSE; 1601 1602 for (j = ii[i]; j < ii[i+1] && !found; j++) 1603 found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE; 1604 1605 if (!found) { 1606 PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i); 1607 if (missing) *missing = PETSC_TRUE; 1608 if (dd) *dd = i+rst; 1609 PetscFunctionReturn(0); 1610 } 1611 } 1612 if (!size) { 1613 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1614 if (missing) *missing = PETSC_TRUE; 1615 if (dd) *dd = rst; 1616 } 1617 } else { 1618 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1619 if (missing) *missing = PETSC_TRUE; 1620 if (dd) *dd = rst; 1621 } 1622 PetscFunctionReturn(0); 1623 } 1624 1625 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s) 1626 { 1627 hypre_ParCSRMatrix *parcsr; 1628 hypre_CSRMatrix *ha; 1629 PetscErrorCode ierr; 1630 HYPRE_Complex hs; 1631 1632 PetscFunctionBegin; 1633 ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr); 1634 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1635 /* diagonal part */ 1636 ha = hypre_ParCSRMatrixDiag(parcsr); 1637 if (ha) { 1638 PetscInt size,i; 1639 HYPRE_Int *ii; 1640 HYPRE_Complex *a; 1641 1642 size = hypre_CSRMatrixNumRows(ha); 1643 a = hypre_CSRMatrixData(ha); 1644 ii = hypre_CSRMatrixI(ha); 1645 for (i = 0; i < ii[size]; i++) a[i] *= hs; 1646 } 1647 /* offdiagonal part */ 1648 ha = hypre_ParCSRMatrixOffd(parcsr); 1649 if (ha) { 1650 PetscInt size,i; 1651 HYPRE_Int *ii; 1652 HYPRE_Complex *a; 1653 1654 size = hypre_CSRMatrixNumRows(ha); 1655 a = hypre_CSRMatrixData(ha); 1656 ii = hypre_CSRMatrixI(ha); 1657 for (i = 0; i < ii[size]; i++) a[i] *= hs; 1658 } 1659 PetscFunctionReturn(0); 1660 } 1661 1662 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 1663 { 1664 hypre_ParCSRMatrix *parcsr; 1665 HYPRE_Int *lrows; 1666 PetscInt rst,ren,i; 1667 PetscErrorCode ierr; 1668 1669 PetscFunctionBegin; 
  if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
  /* convert global row indices to local numbering for hypre;
     NOTE(review): the diag argument is not passed on to hypre here --
     confirm the eliminated rows/columns get the intended diagonal value */
  for (i=0;i<numRows;i++) {
    if (rows[i] < rst || rows[i] >= ren)
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
    lrows[i] = rows[i] - rst;
  }
  PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Zero all stored values of one CSR block (no-op when the block or its data is absent) */
static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ha) {
    HYPRE_Int     *ii, size;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);

    /* ii[size] is the total number of stored entries */
    if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

/* Zero all stored values of A, keeping the nonzero pattern intact */
PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
  /* off-diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Zero the given (local) rows of one CSR block, writing diag into the
   position whose column index equals the local row index */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
{
  PetscInt      ii;
  HYPRE_Int     *i, *j;
  HYPRE_Complex *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(0);

  i = hypre_CSRMatrixI(hA);
  j = hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);

  for (ii = 0; ii < N; ii++) {
    HYPRE_Int jj, ibeg, iend, irow;
    irow = rows[ii];
    ibeg = i[irow];
    iend = i[irow+1];
    /* keep (set to diag) only the entry whose column matches the row; zero the rest */
    for (jj = ibeg; jj < iend; jj++)
      if (j[jj] == irow) a[jj] = diag;
      else a[jj] = 0.0;
  }
  PetscFunctionReturn(0);
}

/* Zero the listed rows of A, placing diag on the diagonal of the local block;
   off-diagonal block rows are zeroed entirely (diag value 0.0 passed below).
   NOTE(review): "right hand size" in the error message is a typo for "side";
   left unchanged because it is a runtime string */
static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  PetscInt           *lrows,len;
  HYPRE_Complex      hdiag;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
  ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr);
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
  /* zero diagonal part */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr);
  /* zero off-diagonal part */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);

  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Start communication of stashed off-process entries (skipped when the user
   promised there are none via MAT_NO_OFF_PROC_ENTRIES) */
static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Give access to one (global) row of the matrix through hypre's row API;
   must be paired with MatRestoreRow_HYPRE */
static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          hnz;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
  PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  if (nz) *nz = (PetscInt)hnz;
  PetscFunctionReturn(0);
}

/* Release the row obtained with MatGetRow_HYPRE */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          hnz;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
  hnz = nz ? (HYPRE_Int)(*nz) : 0;
  PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  PetscFunctionReturn(0);
}

/* Extract an m-by-n block of values (row-major into v) one row at a time */
static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(0);
  /* Ignore negative row indices
   * And negative column indices should be automatically ignored in hypre
   * */
  for (i=0; i<m; i++) {
    if (idxm[i] >= 0) {
      HYPRE_Int hn = (HYPRE_Int)n;
      PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
    }
  }
  PetscFunctionReturn(0);
}

/* Handle the matrix options meaningful for MATHYPRE; all others are silently ignored */
static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_NO_OFF_PROC_ENTRIES:
    if (flg) {
      /* tell hypre not to reserve buffer space for off-process entries */
      PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
    }
    break;
  default:
    break;
  }
  PetscFunctionReturn(0);
}

/* View the matrix: non-native formats go through a temporary AIJ wrapper,
   the native format uses hypre's own text-file output */
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;
  Mat                B;
  PetscViewerFormat  format;
  PetscErrorCode
(*mview)(Mat,PetscViewer) = NULL;

  PetscFunctionBegin;
  ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr);
  if (format != PETSC_VIEWER_NATIVE) {
    /* wrap the ParCSR data in an AIJ matrix (no copy) and reuse its viewer */
    ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
    ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr);
    ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr);
    /* NOTE(review): the CHKERRQ after SETERRQ is unreachable (SETERRQ returns) */
    if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr);
    ierr = (*mview)(B,view);CHKERRQ(ierr);
    ierr = MatDestroy(&B);CHKERRQ(ierr);
  } else {
    Mat_HYPRE   *hA = (Mat_HYPRE*)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char  *filename;

    /* HYPRE uses only text files */
    ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
    if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
    ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr);
    /* hypre writes one file per rank, suffixed with the rank number */
    PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
    ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr);
    if (size > 1) {
      ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* Duplicate A: pattern-only options clone the ParCSR structure (values not
   copied), otherwise the values are copied by MatCreateFromParCSR */
static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;
  PetscCopyMode      cpmode;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
    /* presumably the 0 flag asks hypre not to copy values -- consistent with
       MAT_DO_NOT_COPY_VALUES; the clone is owned by the new Mat */
    parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    cpmode = PETSC_OWN_POINTER;
  } else {
    cpmode = PETSC_COPY_VALUES;
  }
  ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Copy A into B; fast path when both are MATHYPRE with the same pattern,
   otherwise fall back to the generic value-by-value copy */
static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
{
  hypre_ParCSRMatrix *acsr,*bcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
    ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr);
    ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr);
    PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
    ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Extract the diagonal of A into d; requires congruent row/column layouts */
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  HYPRE_Complex      *data = NULL;
  HYPRE_Int          *diag = NULL;
  PetscInt           i;
  PetscBool          cong;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
  if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
#if defined(PETSC_USE_DEBUG)
  {
    /* in debug builds, fail early when a diagonal entry is absent; the
       extraction below assumes every row stores its diagonal */
    PetscBool miss;
    ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
#endif
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
    ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
    diag = hypre_CSRMatrixI(dmat);
    data = hypre_CSRMatrixData(dmat);
    /* data[diag[i]] is the first stored entry of row i; assumes hypre keeps
       the diagonal entry first in each row of the diagonal block -- TODO confirm */
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscblaslapack.h>

/* Y = Y + a*X; fast BLAS axpy on the raw CSR data when the patterns match */
static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    hypre_ParCSRMatrix *x,*y;
    hypre_CSRMatrix    *xloc,*yloc;
    PetscInt           xnnz,ynnz;
    HYPRE_Complex      *xarr,*yarr;
    PetscBLASInt       one=1,bnz;

    ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
    ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);

    /* diagonal block */
    xloc = hypre_ParCSRMatrixDiag(x);
    yloc = hypre_ParCSRMatrixDiag(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    /* identical nonzero counts are required for the flat axpy to be valid */
    if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
    ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));

    /* off-diagonal block */
    xloc = hypre_ParCSRMatrixOffd(x);
    yloc = hypre_ParCSRMatrixOffd(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
    ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
  } else if (str == SUBSET_NONZERO_PATTERN) {
    /* X's pattern is contained in Y's: the generic implementation handles it */
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  } else {
    /* patterns differ: preallocate the union pattern, compute into B, and
       replace Y's content in place */
    Mat B;

    ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr);
    ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
    ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
   based on the hypre IJ interface.
   Level: intermediate

.seealso: MatCreate()
M*/

PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;
  hB->available  = PETSC_TRUE;
  hB->size       = 0;
  hB->array      = NULL;

  B->data      = (void*)hB;
  B->rmap->bs  = 1;
  B->assembled = PETSC_FALSE;

  /* install the MATHYPRE implementations of the Mat operations */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult             = MatMult_HYPRE;
  B->ops->multtranspose    = MatMultTranspose_HYPRE;
  B->ops->multadd          = MatMultAdd_HYPRE;
  B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
  B->ops->setup            = MatSetUp_HYPRE;
  B->ops->destroy          = MatDestroy_HYPRE;
  B->ops->assemblyend      = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin    = MatAssemblyBegin_HYPRE;
  B->ops->ptap             = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult          = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues        = MatSetValues_HYPRE;
  B->ops->missingdiagonal  = MatMissingDiagonal_HYPRE;
  B->ops->scale            = MatScale_HYPRE;
  B->ops->zerorowscolumns  = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries      = MatZeroEntries_HYPRE;
  B->ops->zerorows         = MatZeroRows_HYPRE;
  B->ops->getrow           = MatGetRow_HYPRE;
  B->ops->restorerow       = MatRestoreRow_HYPRE;
  B->ops->getvalues        = MatGetValues_HYPRE;
  B->ops->setoption        = MatSetOption_HYPRE;
  B->ops->duplicate        = MatDuplicate_HYPRE;
  B->ops->copy             = MatCopy_HYPRE;
  B->ops->view             = MatView_HYPRE;
  B->ops->getdiagonal      = MatGetDiagonal_HYPRE;
  B->ops->axpy             = MatAXPY_HYPRE;

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* hypre keeps its own communicator; duplicate to avoid tag conflicts */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  /* compose the conversion/PtAP/accessor methods other types dispatch to */
  ierr =
PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Release a host array that was allocated by hypre (used as a destructor
   callback where PETSc takes ownership of hypre-allocated memory) */
static PetscErrorCode hypre_array_destroy(void *ptr)
{
  PetscFunctionBegin;
  hypre_TFree(ptr,HYPRE_MEMORY_HOST);
  PetscFunctionReturn(0);
}