1 2 /* 3 Creates hypre ijmatrix from PETSc matrix 4 */ 5 6 #include <petscpkg_version.h> 7 #include <petsc/private/petschypre.h> 8 #include <petscmathypre.h> 9 #include <petsc/private/matimpl.h> 10 #include <../src/mat/impls/hypre/mhypre.h> 11 #include <../src/mat/impls/aij/mpi/mpiaij.h> 12 #include <../src/vec/vec/impls/hypre/vhyp.h> 13 #include <HYPRE.h> 14 #include <HYPRE_utilities.h> 15 #include <_hypre_parcsr_ls.h> 16 #include <_hypre_sstruct_ls.h> 17 18 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0) 19 #define hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A) 20 #endif 21 22 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*); 23 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix); 24 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix); 25 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix); 26 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool); 27 static PetscErrorCode hypre_array_destroy(void*); 28 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins); 29 30 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij) 31 { 32 PetscErrorCode ierr; 33 PetscInt i,n_d,n_o; 34 const PetscInt *ia_d,*ia_o; 35 PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE; 36 HYPRE_Int *nnz_d=NULL,*nnz_o=NULL; 37 38 PetscFunctionBegin; 39 if (A_d) { /* determine number of nonzero entries in local diagonal part */ 40 ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr); 41 if (done_d) { 42 ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr); 43 for (i=0; i<n_d; i++) { 44 nnz_d[i] = ia_d[i+1] - ia_d[i]; 45 } 46 } 47 ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr); 48 } 49 if (A_o) { /* determine number of nonzero entries in local off-diagonal part */ 50 ierr = 
MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 51 if (done_o) { 52 ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr); 53 for (i=0; i<n_o; i++) { 54 nnz_o[i] = ia_o[i+1] - ia_o[i]; 55 } 56 } 57 ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 58 } 59 if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */ 60 if (!done_o) { /* only diagonal part */ 61 ierr = PetscCalloc1(n_d,&nnz_o);CHKERRQ(ierr); 62 } 63 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 64 { /* If we don't do this, the columns of the matrix will be all zeros! */ 65 hypre_AuxParCSRMatrix *aux_matrix; 66 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 67 hypre_AuxParCSRMatrixDestroy(aux_matrix); 68 hypre_IJMatrixTranslator(ij) = NULL; 69 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 70 /* it seems they partially fixed it in 2.19.0 */ 71 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0) 72 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 73 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 74 #endif 75 } 76 #else 77 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 78 #endif 79 ierr = PetscFree(nnz_d);CHKERRQ(ierr); 80 ierr = PetscFree(nnz_o);CHKERRQ(ierr); 81 } 82 PetscFunctionReturn(0); 83 } 84 85 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA) 86 { 87 PetscErrorCode ierr; 88 PetscInt rstart,rend,cstart,cend; 89 90 PetscFunctionBegin; 91 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 92 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 93 rstart = A->rmap->rstart; 94 rend = A->rmap->rend; 95 cstart = A->cmap->rstart; 96 cend = A->cmap->rend; 97 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 98 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 99 { 100 PetscBool same; 101 Mat A_d,A_o; 102 const PetscInt *colmap; 103 ierr = 
PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr); 104 if (same) { 105 ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 106 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 107 PetscFunctionReturn(0); 108 } 109 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr); 110 if (same) { 111 ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 112 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 113 PetscFunctionReturn(0); 114 } 115 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr); 116 if (same) { 117 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 118 PetscFunctionReturn(0); 119 } 120 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr); 121 if (same) { 122 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 } 126 PetscFunctionReturn(0); 127 } 128 129 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij) 130 { 131 PetscErrorCode ierr; 132 PetscInt i,rstart,rend,ncols,nr,nc; 133 const PetscScalar *values; 134 const PetscInt *cols; 135 PetscBool flg; 136 137 PetscFunctionBegin; 138 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 139 ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr); 140 if (flg && nr == nc) { 141 ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr); 142 PetscFunctionReturn(0); 143 } 144 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 145 if (flg) { 146 ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr); 147 PetscFunctionReturn(0); 148 } 149 150 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 151 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 152 for (i=rstart; i<rend; i++) { 153 ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 154 if (ncols) { 155 HYPRE_Int nc = (HYPRE_Int)ncols; 156 157 if ((PetscInt)nc 
/* Fast copy of a SeqAIJ matrix into an already preallocated hypre IJ matrix:
   the CSR arrays of the diagonal block of the underlying ParCSR matrix are
   filled directly, bypassing the IJ assembly interface. */
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  /* can row offsets/column indices be bulk-copied, or must they be converted? */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr);
    ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr);
  } else {
    PetscInt i;

    /* integer widths differ: convert entry by entry */
    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);

  /* CSR arrays are complete: tell hypre not to run its auxiliary assembly */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}

/* Fast copy of an MPIAIJ matrix into a preallocated hypre IJ matrix:
   the diagonal and off-diagonal CSR blocks of the ParCSR matrix are
   filled directly from the MPIAIJ blocks. */
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  /* hypre >= 2.16 keeps local indices in the diag block: plain copy */
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);
  if (sameint) {
    ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
  jj = (PetscInt*) hoffd->big_j;
#else
  jj = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  /* garray maps local off-diagonal indices to global column numbers */
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

  ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr);

  /* arrays are fully set: skip hypre's auxiliary assembly path */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}
/* Convert a MATHYPRE matrix A to MATIS format: build row/column
   local-to-global maps from the ParCSR layout, then merge the diagonal and
   off-diagonal CSR blocks into a single local SeqAIJ matrix per process. */
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE              *mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* global sizes, first owned row/column, and the two local CSR blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    /* first the owned (diagonal) columns, then the ghost columns */
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse path: borrow the CSR arrays from the existing local matrix */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: walk diag and offd rows in lockstep, appending the
     offd columns (shifted by dc into the local numbering) after the diag ones,
     then sort each merged row by column index */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = (PetscScalar*)aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad the row pointer for any trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the arrays to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
reuse %D requested %D",iptr[nr],nnz); 339 ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr); 340 } 341 /* merge local matrices */ 342 ii = iptr; 343 jj = jptr; 344 aa = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 345 *ii = *(hdi++) + *(hoi++); 346 for (jd=0,jo=0,cum=0; *ii<nnz; cum++) { 347 PetscScalar *aold = (PetscScalar*)aa; 348 PetscInt *jold = jj,nc = jd+jo; 349 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; } 350 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; } 351 *(++ii) = *(hdi++) + *(hoi++); 352 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr); 353 } 354 for (; cum<dr; cum++) *(++ii) = nnz; 355 if (reuse != MAT_REUSE_MATRIX) { 356 Mat_SeqAIJ* a; 357 358 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr); 359 ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr); 360 /* hack SeqAIJ */ 361 a = (Mat_SeqAIJ*)(lA->data); 362 a->free_a = PETSC_TRUE; 363 a->free_ij = PETSC_TRUE; 364 ierr = MatDestroy(&lA);CHKERRQ(ierr); 365 } 366 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 367 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 368 if (reuse == MAT_INPLACE_MATRIX) { 369 ierr = MatHeaderReplace(A,B);CHKERRQ(ierr); 370 } 371 PetscFunctionReturn(0); 372 } 373 374 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B) 375 { 376 Mat M = NULL; 377 Mat_HYPRE *hB; 378 MPI_Comm comm = PetscObjectComm((PetscObject)A); 379 PetscErrorCode ierr; 380 381 PetscFunctionBegin; 382 if (reuse == MAT_REUSE_MATRIX) { 383 /* always destroy the old matrix and create a new memory; 384 hope this does not churn the memory too much. The problem 385 is I do not know if it is possible to put the matrix back to 386 its initial state so that we can directly copy the values 387 the second time through. 
/* Convert a MATHYPRE matrix A to (Seq/MPI)AIJ.
   MAT_INITIAL_MATRIX allocates fresh CSR arrays; MAT_REUSE_MATRIX writes into
   the CSR arrays of the existing *B; MAT_INPLACE_MATRIX steals the hypre CSR
   arrays where integer sizes permit and replaces A. */
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  /* when true, hypre's CSR integer arrays can be used/copied directly */
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain dii/djj/da (diagonal CSR) depending on the reuse mode */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
      ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    } else {
      /* same integer size: alias hypre's arrays, ownership transferred below */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  if (!sameint) {
    for (i=0;i<m+1;i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr);
    ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr);
  }
  ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr);
  /* AIJ requires sorted column indices within each row */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* same three-way logic for the off-diagonal CSR (oii/ojj/oa) */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
        ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (!sameint) {
      for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
    } else {
      ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr);
    }
    /* translate offd local indices to global via hypre's col_map_offd */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: hand ownership of the CSR arrays to the new matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    /* sequential case: no off-diagonal block */
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns: register them in containers with a hypre-aware
     destroy callback (only the data arrays when the integer sizes differ) */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt   nh;
    void       *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
/* Wrap a (Seq/MPI)AIJ matrix A in a temporary hypre_ParCSRMatrix *hA that
   aliases A's CSR arrays where possible (no copy of the values is made).
   Must be paired with MatAIJRestoreParCSR_Private. */
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  /* when true, PETSc's integer CSR arrays can be aliased directly */
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* assumed partition: hypre wants only this rank's start/end */
    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
  /* the starts arrays belong to PETSc's layouts, hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (sameint) { /* reuse CSR pointers */
    hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
    hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
  } else { /* malloc CSR pointers */
    HYPRE_Int *hi,*hj;

    ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr);
    for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
    for (i = 0; i < dnnz; i++) hj[i] = (HYPRE_Int)(diag->j[i]);
    hypre_CSRMatrixI(hdiag) = hi;
    hypre_CSRMatrixJ(hdiag) = hj;
  }
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0); /* values stay owned by PETSc */

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (sameint) { /* reuse CSR pointers */
      hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
      hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
    } else { /* malloc CSR pointers */
      HYPRE_Int *hi,*hj;

      ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr);
      for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
      for (i = 0; i < onnz; i++) hj[i] = (HYPRE_Int)(offd->j[i]);
      hypre_CSRMatrixI(hoffd) = hi;
      hypre_CSRMatrixJ(hoffd) = hj;
    }
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}
/* Destroy the temporary ParCSR wrapper created by MatAIJGetParCSR_Private,
   freeing only what PETSc allocated and detaching the aliased pointers so
   hypre does not free PETSc-owned memory. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc */
  if (!sameint) {
    /* the i/j arrays were PetscMalloc2'd conversions, not aliases */
    HYPRE_Int *hi,*hj;

    hi   = hypre_CSRMatrixI(hdiag);
    hj   = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (hoffd) {
      hi   = hypre_CSRMatrixI(hoffd);
      hj   = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL; /* garray belongs to the MPIAIJ matrix */
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}
/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember the ownership flags so they can be restored afterwards */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}
hypre_ParCSRMatrixSetColStartsOwner(hP,1); 767 if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1); 768 PetscFunctionReturn(0); 769 } 770 771 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C) 772 { 773 Mat B; 774 hypre_ParCSRMatrix *hA,*hP,*hPtAP; 775 PetscErrorCode ierr; 776 Mat_Product *product=C->product; 777 778 PetscFunctionBegin; 779 ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 780 ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr); 781 ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr); 782 ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr); 783 784 ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr); 785 C->product = product; 786 787 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 788 ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr); 789 PetscFunctionReturn(0); 790 } 791 792 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat C) 793 { 794 PetscErrorCode ierr; 795 796 PetscFunctionBegin; 797 ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr); 798 C->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE; 799 C->ops->productnumeric = MatProductNumeric_PtAP; 800 PetscFunctionReturn(0); 801 } 802 803 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C) 804 { 805 Mat B; 806 Mat_HYPRE *hP; 807 hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr; 808 HYPRE_Int type; 809 MPI_Comm comm = PetscObjectComm((PetscObject)A); 810 PetscBool ishypre; 811 PetscErrorCode ierr; 812 813 PetscFunctionBegin; 814 ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr); 815 if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE); 816 hP = (Mat_HYPRE*)P->data; 817 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type)); 818 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 819 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr)); 820 821 ierr = 
MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 822 ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr); 823 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 824 825 /* create temporary matrix and merge to C */ 826 ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr); 827 ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr); 828 PetscFunctionReturn(0); 829 } 830 831 static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C) 832 { 833 Mat B; 834 hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr; 835 Mat_HYPRE *hA,*hP; 836 PetscBool ishypre; 837 HYPRE_Int type; 838 PetscErrorCode ierr; 839 840 PetscFunctionBegin; 841 ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr); 842 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE); 843 ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr); 844 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE); 845 hA = (Mat_HYPRE*)A->data; 846 hP = (Mat_HYPRE*)P->data; 847 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 848 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 849 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type)); 850 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 851 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr)); 852 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr)); 853 ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr); 854 ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr); 855 ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr); 856 PetscFunctionReturn(0); 857 } 858 859 /* calls hypre_ParMatmul 860 hypre_ParMatMul uses 
hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* PetscStackPush/Pop so error traces attribute a failure to the hypre routine */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* C = A * B with AIJ inputs: wrap as ParCSR, multiply in hypre, convert the result back */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;
  Mat_Product        *product=C->product; /* saved: MatHeaderMerge clobbers it */

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  /* hAB ownership is transferred to D (PETSC_OWN_POINTER) */
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);

  /* merge the result into C, then restore the saved product descriptor */
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  C->product = product;

  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase only selects the output type and numeric callbacks; fill is unused */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* C = A * B with both operands of type MATHYPRE (HYPRE_PARCSR only) */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;
  Mat_Product        *product;

  PetscFunctionBegin;
  ierr =
PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr); 918 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE); 919 ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr); 920 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE); 921 hA = (Mat_HYPRE*)A->data; 922 hB = (Mat_HYPRE*)B->data; 923 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 924 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 925 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type)); 926 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 927 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr)); 928 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr)); 929 ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr); 930 ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr); 931 932 /* need to use HeaderReplace because HeaderMerge messes up with the communicator */ 933 product = C->product; /* save it from MatHeaderReplace() */ 934 C->product = NULL; 935 ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr); 936 C->product = product; 937 C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE; 938 C->ops->productnumeric = MatProductNumeric_AB; 939 PetscFunctionReturn(0); 940 } 941 942 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D) 943 { 944 Mat E; 945 hypre_ParCSRMatrix *hA,*hB,*hC,*hABC; 946 PetscErrorCode ierr; 947 948 PetscFunctionBegin; 949 ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 950 ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr); 951 ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr); 952 ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr); 953 
ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase only sets the output type; fill is unused */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------- */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* choose implementations for C = A*B; only hooks up callbacks when A is MATHYPRE */
static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)product->A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}

/* choose output type and callbacks for C = P^T*A*P products involving MATHYPRE */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      flg;
  PetscInt       type = 0; /* index into outTypes; default "aij" */
  /* output formats for the PtAP result, selectable at runtime via options */
  const char     *outTypes[4] = {"aij","seqaij","mpiaij","hypre"};
  PetscInt       ntype = 4;
  Mat            A = product->A;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr =
PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&Ahypre);CHKERRQ(ierr); 1015 if (Ahypre) { /* A is a Hypre matrix */ 1016 ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr); 1017 C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE; 1018 C->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE; 1019 PetscFunctionReturn(0); 1020 } 1021 1022 /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */ 1023 /* Get runtime option */ 1024 if (product->api_user) { 1025 ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatPtAP_HYPRE","Mat");CHKERRQ(ierr); 1026 ierr = PetscOptionsEList("-matptap_hypre_outtype","MatPtAP outtype","MatPtAP outtype",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr); 1027 ierr = PetscOptionsEnd();CHKERRQ(ierr); 1028 } else { 1029 ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatProduct_PtAP_HYPRE","Mat");CHKERRQ(ierr); 1030 ierr = PetscOptionsEList("-matproduct_ptap_hypre_outtype","MatProduct_PtAP outtype","MatProduct_PtAP",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr); 1031 ierr = PetscOptionsEnd();CHKERRQ(ierr); 1032 } 1033 1034 if (type == 0 || type == 1 || type == 2) { 1035 ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr); 1036 } else if (type == 3) { 1037 ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr); 1038 } else SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatPtAP outtype is not supported"); 1039 C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE; 1040 C->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE; 1041 PetscFunctionReturn(0); 1042 } 1043 1044 static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C) 1045 { 1046 PetscErrorCode ierr; 1047 Mat_Product *product = C->product; 1048 1049 PetscFunctionBegin; 1050 switch (product->type) { 1051 case MATPRODUCT_AB: 1052 ierr = MatProductSetFromOptions_HYPRE_AB(C);CHKERRQ(ierr); 1053 break; 1054 case MATPRODUCT_PtAP: 1055 ierr = MatProductSetFromOptions_HYPRE_PtAP(C);CHKERRQ(ierr); 1056 break; 1057 default: 
break;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------- */

/* y = A^T * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = y + A * x, computed as z = 1.0*A*x + 1.0*z after copying y into z */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = y + A^T * x, computed as z = 1.0*A^T*x + 1.0*z after copying y into z */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans.
   The PETSc vector arrays are temporarily swapped into the cached hypre
   work vectors hA->x / hA->b, so no data is copied. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say; /* sax/say hold the saved hypre pointers */
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const
PetscScalar**)&ax);CHKERRQ(ierr); 1121 ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr); 1122 if (trans) { 1123 VecHYPRE_ParVectorReplacePointer(hA->x,ay,say); 1124 VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax); 1125 hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx); 1126 VecHYPRE_ParVectorReplacePointer(hA->x,say,ay); 1127 VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax); 1128 } else { 1129 VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax); 1130 VecHYPRE_ParVectorReplacePointer(hA->b,ay,say); 1131 hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy); 1132 VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax); 1133 VecHYPRE_ParVectorReplacePointer(hA->b,say,ay); 1134 } 1135 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr); 1136 ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr); 1137 PetscFunctionReturn(0); 1138 } 1139 1140 static PetscErrorCode MatDestroy_HYPRE(Mat A) 1141 { 1142 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1143 PetscErrorCode ierr; 1144 1145 PetscFunctionBegin; 1146 if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x)); 1147 if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b)); 1148 if (hA->ij) { 1149 if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL; 1150 PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij)); 1151 } 1152 if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); } 1153 1154 ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr); 1155 1156 ierr = PetscFree(hA->array);CHKERRQ(ierr); 1157 1158 ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr); 1159 ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr); 1160 ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_seqaij_hypre_C",NULL);CHKERRQ(ierr); 1161 ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_mpiaij_hypre_C",NULL);CHKERRQ(ierr); 1162 ierr = 
PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr); 1163 ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr); 1164 ierr = PetscFree(A->data);CHKERRQ(ierr); 1165 PetscFunctionReturn(0); 1166 } 1167 1168 static PetscErrorCode MatSetUp_HYPRE(Mat A) 1169 { 1170 PetscErrorCode ierr; 1171 1172 PetscFunctionBegin; 1173 ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr); 1174 PetscFunctionReturn(0); 1175 } 1176 1177 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode) 1178 { 1179 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1180 Vec x,b; 1181 PetscMPIInt n; 1182 PetscInt i,j,rstart,ncols,flg; 1183 PetscInt *row,*col; 1184 PetscScalar *val; 1185 PetscErrorCode ierr; 1186 1187 PetscFunctionBegin; 1188 if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE"); 1189 1190 if (!A->nooffprocentries) { 1191 while (1) { 1192 ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 1193 if (!flg) break; 1194 1195 for (i=0; i<n; ) { 1196 /* Now identify the consecutive vals belonging to the same row */ 1197 for (j=i,rstart=row[j]; j<n; j++) { 1198 if (row[j] != rstart) break; 1199 } 1200 if (j < n) ncols = j-i; 1201 else ncols = n-i; 1202 /* Now assemble all these values with a single function call */ 1203 ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr); 1204 1205 i = j; 1206 } 1207 } 1208 ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr); 1209 } 1210 1211 PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij)); 1212 /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */ 1213 /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */ 1214 if (!hA->sorted_full) { 1215 
hypre_AuxParCSRMatrix *aux_matrix; 1216 1217 /* call destroy just to make sure we do not leak anything */ 1218 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1219 PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix)); 1220 hypre_IJMatrixTranslator(hA->ij) = NULL; 1221 1222 /* Initialize with assembled flag -> it only recreates the aux_par_matrix */ 1223 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1224 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1225 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */ 1226 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0) 1227 PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix)); 1228 #else 1229 PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize_v2,(aux_matrix,HYPRE_MEMORY_HOST)); 1230 #endif 1231 } 1232 if (hA->x) PetscFunctionReturn(0); 1233 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1234 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1235 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr); 1236 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr); 1237 ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr); 1238 ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr); 1239 ierr = VecDestroy(&x);CHKERRQ(ierr); 1240 ierr = VecDestroy(&b);CHKERRQ(ierr); 1241 PetscFunctionReturn(0); 1242 } 1243 1244 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array) 1245 { 1246 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1247 PetscErrorCode ierr; 1248 1249 PetscFunctionBegin; 1250 if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use"); 1251 1252 if (hA->size >= size) { 1253 *array = hA->array; 1254 } else { 1255 ierr = PetscFree(hA->array);CHKERRQ(ierr); 1256 hA->size = size; 1257 ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr); 1258 *array = 
hA->array; 1259 } 1260 1261 hA->available = PETSC_FALSE; 1262 PetscFunctionReturn(0); 1263 } 1264 1265 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array) 1266 { 1267 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1268 1269 PetscFunctionBegin; 1270 *array = NULL; 1271 hA->available = PETSC_TRUE; 1272 PetscFunctionReturn(0); 1273 } 1274 1275 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins) 1276 { 1277 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1278 PetscScalar *vals = (PetscScalar *)v; 1279 HYPRE_Complex *sscr; 1280 PetscInt *cscr[2]; 1281 PetscInt i,nzc; 1282 void *array = NULL; 1283 PetscErrorCode ierr; 1284 1285 PetscFunctionBegin; 1286 ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr); 1287 cscr[0] = (PetscInt*)array; 1288 cscr[1] = ((PetscInt*)array)+nc; 1289 sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2); 1290 for (i=0,nzc=0;i<nc;i++) { 1291 if (cols[i] >= 0) { 1292 cscr[0][nzc ] = cols[i]; 1293 cscr[1][nzc++] = i; 1294 } 1295 } 1296 if (!nzc) { 1297 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1298 PetscFunctionReturn(0); 1299 } 1300 1301 if (ins == ADD_VALUES) { 1302 for (i=0;i<nr;i++) { 1303 if (rows[i] >= 0 && nzc) { 1304 PetscInt j; 1305 HYPRE_Int hnc = (HYPRE_Int)nzc; 1306 1307 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! 
number of columns %D for row %D",nzc,rows[i]); 1308 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1309 PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1310 } 1311 vals += nc; 1312 } 1313 } else { /* INSERT_VALUES */ 1314 PetscInt rst,ren; 1315 1316 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1317 for (i=0;i<nr;i++) { 1318 if (rows[i] >= 0 && nzc) { 1319 PetscInt j; 1320 HYPRE_Int hnc = (HYPRE_Int)nzc; 1321 1322 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]); 1323 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1324 /* nonlocal values */ 1325 if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); } 1326 /* local values */ 1327 else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1328 } 1329 vals += nc; 1330 } 1331 } 1332 1333 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1334 PetscFunctionReturn(0); 1335 } 1336 1337 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1338 { 1339 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1340 HYPRE_Int *hdnnz,*honnz; 1341 PetscInt i,rs,re,cs,ce,bs; 1342 PetscMPIInt size; 1343 PetscErrorCode ierr; 1344 1345 PetscFunctionBegin; 1346 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1347 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1348 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1349 rs = A->rmap->rstart; 1350 re = A->rmap->rend; 1351 cs = A->cmap->rstart; 1352 ce = A->cmap->rend; 1353 if (!hA->ij) { 1354 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij)); 1355 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 
1356 } else { 1357 HYPRE_BigInt hrs,hre,hcs,hce; 1358 PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce)); 1359 if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re); 1360 if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce); 1361 } 1362 if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs; 1363 if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs; 1364 1365 if (!dnnz) { 1366 ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr); 1367 for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz; 1368 } else { 1369 hdnnz = (HYPRE_Int*)dnnz; 1370 } 1371 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 1372 if (size > 1) { 1373 hypre_AuxParCSRMatrix *aux_matrix; 1374 if (!onnz) { 1375 ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr); 1376 for (i=0;i<A->rmap->n;i++) honnz[i] = onz; 1377 } else honnz = (HYPRE_Int*)onnz; 1378 /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems 1379 they assume the user will input the entire row values, properly sorted 1380 In PETSc, we don't make such an assumption and set this flag to 1, 1381 unless the option MAT_SORTED_FULL is set to true. 
1382 Also, to avoid possible memory leaks, we destroy and recreate the translator 1383 This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize 1384 the IJ matrix for us */ 1385 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1386 hypre_AuxParCSRMatrixDestroy(aux_matrix); 1387 hypre_IJMatrixTranslator(hA->ij) = NULL; 1388 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz)); 1389 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1390 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full; 1391 } else { 1392 honnz = NULL; 1393 PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz)); 1394 } 1395 1396 /* reset assembled flag and call the initialize method */ 1397 hypre_IJMatrixAssembleFlag(hA->ij) = 0; 1398 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1399 if (!dnnz) { 1400 ierr = PetscFree(hdnnz);CHKERRQ(ierr); 1401 } 1402 if (!onnz && honnz) { 1403 ierr = PetscFree(honnz);CHKERRQ(ierr); 1404 } 1405 1406 /* Match AIJ logic */ 1407 A->preallocated = PETSC_TRUE; 1408 A->assembled = PETSC_FALSE; 1409 PetscFunctionReturn(0); 1410 } 1411 1412 /*@C 1413 MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format 1414 1415 Collective on Mat 1416 1417 Input Parameters: 1418 + A - the matrix 1419 . dnz - number of nonzeros per row in DIAGONAL portion of local submatrix 1420 (same value is used for all local rows) 1421 . dnnz - array containing the number of nonzeros in the various rows of the 1422 DIAGONAL portion of the local submatrix (possibly different for each row) 1423 or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure. 1424 The size of this array is equal to the number of local rows, i.e 'm'. 1425 For matrices that will be factored, you must leave room for (and set) 1426 the diagonal entry even if it is zero. 1427 . 
onz - number of nonzeros per row in the OFF-DIAGONAL portion of local 1428 submatrix (same value is used for all local rows). 1429 - onnz - array containing the number of nonzeros in the various rows of the 1430 OFF-DIAGONAL portion of the local submatrix (possibly different for 1431 each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero 1432 structure. The size of this array is equal to the number 1433 of local rows, i.e 'm'. 1434 1435 Notes: 1436 If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored. 1437 1438 Level: intermediate 1439 1440 .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE 1441 @*/ 1442 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1443 { 1444 PetscErrorCode ierr; 1445 1446 PetscFunctionBegin; 1447 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1448 PetscValidType(A,1); 1449 ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr); 1450 PetscFunctionReturn(0); 1451 } 1452 1453 /* 1454 MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix 1455 1456 Collective 1457 1458 Input Parameters: 1459 + parcsr - the pointer to the hypre_ParCSRMatrix 1460 . mtype - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported. 1461 - copymode - PETSc copying options 1462 1463 Output Parameter: 1464 . 
A - the matrix 1465 1466 Level: intermediate 1467 1468 .seealso: MatHYPRE, PetscCopyMode 1469 */ 1470 PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A) 1471 { 1472 Mat T; 1473 Mat_HYPRE *hA; 1474 MPI_Comm comm; 1475 PetscInt rstart,rend,cstart,cend,M,N; 1476 PetscBool isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis; 1477 PetscErrorCode ierr; 1478 1479 PetscFunctionBegin; 1480 comm = hypre_ParCSRMatrixComm(parcsr); 1481 ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 1482 ierr = PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);CHKERRQ(ierr); 1483 ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 1484 ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr); 1485 ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr); 1486 ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr); 1487 isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij); 1488 if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! 
Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE); 1489 /* access ParCSRMatrix */ 1490 rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr); 1491 rend = hypre_ParCSRMatrixLastRowIndex(parcsr); 1492 cstart = hypre_ParCSRMatrixFirstColDiag(parcsr); 1493 cend = hypre_ParCSRMatrixLastColDiag(parcsr); 1494 M = hypre_ParCSRMatrixGlobalNumRows(parcsr); 1495 N = hypre_ParCSRMatrixGlobalNumCols(parcsr); 1496 1497 /* fix for empty local rows/columns */ 1498 if (rend < rstart) rend = rstart; 1499 if (cend < cstart) cend = cstart; 1500 1501 /* PETSc convention */ 1502 rend++; 1503 cend++; 1504 rend = PetscMin(rend,M); 1505 cend = PetscMin(cend,N); 1506 1507 /* create PETSc matrix with MatHYPRE */ 1508 ierr = MatCreate(comm,&T);CHKERRQ(ierr); 1509 ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr); 1510 ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr); 1511 hA = (Mat_HYPRE*)(T->data); 1512 1513 /* create HYPRE_IJMatrix */ 1514 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 1515 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 1516 1517 /* create new ParCSR object if needed */ 1518 if (ishyp && copymode == PETSC_COPY_VALUES) { 1519 hypre_ParCSRMatrix *new_parcsr; 1520 hypre_CSRMatrix *hdiag,*hoffd,*ndiag,*noffd; 1521 1522 new_parcsr = hypre_ParCSRMatrixClone(parcsr,0); 1523 hdiag = hypre_ParCSRMatrixDiag(parcsr); 1524 hoffd = hypre_ParCSRMatrixOffd(parcsr); 1525 ndiag = hypre_ParCSRMatrixDiag(new_parcsr); 1526 noffd = hypre_ParCSRMatrixOffd(new_parcsr); 1527 ierr = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr); 1528 ierr = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr); 1529 parcsr = new_parcsr; 1530 copymode = PETSC_OWN_POINTER; 1531 } 1532 1533 /* set ParCSR object */ 1534 hypre_IJMatrixObject(hA->ij) = 
parcsr; 1535 T->preallocated = PETSC_TRUE; 1536 1537 /* set assembled flag */ 1538 hypre_IJMatrixAssembleFlag(hA->ij) = 1; 1539 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1540 if (ishyp) { 1541 PetscMPIInt myid = 0; 1542 1543 /* make sure we always have row_starts and col_starts available */ 1544 if (HYPRE_AssumedPartitionCheck()) { 1545 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 1546 } 1547 if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) { 1548 PetscLayout map; 1549 1550 ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr); 1551 ierr = PetscLayoutSetUp(map);CHKERRQ(ierr); 1552 hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid); 1553 } 1554 if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) { 1555 PetscLayout map; 1556 1557 ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr); 1558 ierr = PetscLayoutSetUp(map);CHKERRQ(ierr); 1559 hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid); 1560 } 1561 /* prevent from freeing the pointer */ 1562 if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE; 1563 *A = T; 1564 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1565 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1566 } else if (isaij) { 1567 if (copymode != PETSC_OWN_POINTER) { 1568 /* prevent from freeing the pointer */ 1569 hA->inner_free = PETSC_FALSE; 1570 ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr); 1571 ierr = MatDestroy(&T);CHKERRQ(ierr); 1572 } else { /* AIJ return type with PETSC_OWN_POINTER */ 1573 ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr); 1574 *A = T; 1575 } 1576 } else if (isis) { 1577 ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr); 1578 if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE; 1579 ierr = MatDestroy(&T);CHKERRQ(ierr); 1580 } 1581 PetscFunctionReturn(0); 1582 } 1583 1584 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr) 1585 { 1586 
Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}

/*
   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

   Not collective

   Input Parameters:
+  A  - the MATHYPRE object

   Output Parameter:
.  parcsr  - the pointer to the hypre_ParCSRMatrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* scan the local diagonal block for rows that have no stored diagonal entry */
static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscInt           rst;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
  ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  if (missing) *missing = PETSC_FALSE;
  if (dd) *dd = -1;
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt  size,i;
    HYPRE_Int *ii,*jj;

    size = hypre_CSRMatrixNumRows(ha);
    ii   = hypre_CSRMatrixI(ha);
    jj   = hypre_CSRMatrixJ(ha);
    for (i = 0; i < size; i++) {
      PetscInt
j; 1646 PetscBool found = PETSC_FALSE; 1647 1648 for (j = ii[i]; j < ii[i+1] && !found; j++) 1649 found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE; 1650 1651 if (!found) { 1652 PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i); 1653 if (missing) *missing = PETSC_TRUE; 1654 if (dd) *dd = i+rst; 1655 PetscFunctionReturn(0); 1656 } 1657 } 1658 if (!size) { 1659 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1660 if (missing) *missing = PETSC_TRUE; 1661 if (dd) *dd = rst; 1662 } 1663 } else { 1664 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1665 if (missing) *missing = PETSC_TRUE; 1666 if (dd) *dd = rst; 1667 } 1668 PetscFunctionReturn(0); 1669 } 1670 1671 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s) 1672 { 1673 hypre_ParCSRMatrix *parcsr; 1674 hypre_CSRMatrix *ha; 1675 PetscErrorCode ierr; 1676 HYPRE_Complex hs; 1677 1678 PetscFunctionBegin; 1679 ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr); 1680 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1681 /* diagonal part */ 1682 ha = hypre_ParCSRMatrixDiag(parcsr); 1683 if (ha) { 1684 PetscInt size,i; 1685 HYPRE_Int *ii; 1686 HYPRE_Complex *a; 1687 1688 size = hypre_CSRMatrixNumRows(ha); 1689 a = hypre_CSRMatrixData(ha); 1690 ii = hypre_CSRMatrixI(ha); 1691 for (i = 0; i < ii[size]; i++) a[i] *= hs; 1692 } 1693 /* offdiagonal part */ 1694 ha = hypre_ParCSRMatrixOffd(parcsr); 1695 if (ha) { 1696 PetscInt size,i; 1697 HYPRE_Int *ii; 1698 HYPRE_Complex *a; 1699 1700 size = hypre_CSRMatrixNumRows(ha); 1701 a = hypre_CSRMatrixData(ha); 1702 ii = hypre_CSRMatrixI(ha); 1703 for (i = 0; i < ii[size]; i++) a[i] *= hs; 1704 } 1705 PetscFunctionReturn(0); 1706 } 1707 1708 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 1709 { 1710 hypre_ParCSRMatrix *parcsr; 1711 HYPRE_Int *lrows; 1712 PetscInt rst,ren,i; 1713 PetscErrorCode ierr; 1714 1715 
PetscFunctionBegin;
  if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
  /* translate global row indices to local; off-process rows are not supported */
  for (i=0;i<numRows;i++) {
    if (rows[i] < rst || rows[i] >= ren)
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
    lrows[i] = rows[i] - rst;
  }
  /* NOTE(review): the 'diag' argument is not forwarded to hypre here; the diagonal
     value placed in eliminated rows is whatever hypre_ParCSRMatrixEliminateRowsCols
     uses internally -- TODO confirm against the hypre implementation */
  PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Zeroes the stored values of a single hypre CSR block (pattern is kept); a NULL block is a no-op */
static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ha) {
    HYPRE_Int     *ii, size;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);

    /* ii[size] = total stored nonzeros; data pointer may be NULL for an empty block */
    if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

/* Zeroes all stored values of the matrix, keeping the nonzero pattern */
PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr);
  /* off-diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Zeroes the listed local rows of one CSR block, writing 'diag' into any stored
   entry whose column equals the row index (i.e. the diagonal of that block) */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
{
  PetscInt      ii;
  HYPRE_Int     *i, *j;
  HYPRE_Complex *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(0);

  i = hypre_CSRMatrixI(hA);
  j = hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);

  for (ii = 0; ii < N; ii++) {
    HYPRE_Int jj,
ibeg, iend, irow;

    irow = rows[ii];
    ibeg = i[irow];
    iend = i[irow+1];
    /* zero the whole row; the stored diagonal entry (if any) gets 'diag' instead */
    for (jj = ibeg; jj < iend; jj++)
      if (j[jj] == irow) a[jj] = diag;
      else a[jj] = 0.0;
  }
  PetscFunctionReturn(0);
}

/* Zeroes the given rows (rows only, not columns), setting the diagonal entries to 'diag' */
static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  PetscInt           *lrows,len;
  HYPRE_Complex      hdiag;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
  ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr);
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
  /* zero diagonal part; the diagonal value only lives in this block */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr);
  /* zero off-diagonal part */
  ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr);

  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Starts communication of stashed off-process entries; skipped entirely when the
   user promised there are none (MAT_NO_OFF_PROC_ENTRIES) */
static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Returns a row of the matrix via hypre; pointers are owned by hypre and must be
   released with MatRestoreRow_HYPRE */
static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          hnz;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  if (nz) *nz = (PetscInt)hnz;
  PetscFunctionReturn(0);
}

/* Releases the row pointers obtained with MatGetRow_HYPRE */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          hnz;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* call HYPRE API */
  hnz = nz ? (HYPRE_Int)(*nz) : 0;
  PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
  PetscFunctionReturn(0);
}

/* Extracts an m x n logically dense block of values, one row per hypre call */
static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  PetscInt  i;

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(0);
  /* Ignore negative row indices
   * And negative column indices should be automatically ignored in hypre
   * */
  for (i=0; i<m; i++) {
    if (idxm[i] >= 0) {
      HYPRE_Int hn = (HYPRE_Int)n;
      /* one row at a time: the output for row i starts at v + i*n */
      PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
    }
  }
  PetscFunctionReturn(0);
}

/* Handles the subset of MatOptions meaningful for MATHYPRE; all others are silently ignored */
static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_NO_OFF_PROC_ENTRIES:
    if (flg) {
      /* tell hypre it does not need to buffer off-process entries */
      PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
    }
    break;
  case MAT_SORTED_FULL:
    hA->sorted_full = flg;
    break;
  default:
    break;
  }
  PetscFunctionReturn(0);
}

/* Views the matrix: native format prints hypre's own text files, any other
   format converts to AIJ and delegates to that type's viewer */
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode
ierr; 1895 Mat B; 1896 PetscViewerFormat format; 1897 PetscErrorCode (*mview)(Mat,PetscViewer) = NULL; 1898 1899 PetscFunctionBegin; 1900 ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr); 1901 if (format != PETSC_VIEWER_NATIVE) { 1902 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1903 ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr); 1904 ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr); 1905 if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr); 1906 ierr = (*mview)(B,view);CHKERRQ(ierr); 1907 ierr = MatDestroy(&B);CHKERRQ(ierr); 1908 } else { 1909 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1910 PetscMPIInt size; 1911 PetscBool isascii; 1912 const char *filename; 1913 1914 /* HYPRE uses only text files */ 1915 ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1916 if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name); 1917 ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr); 1918 PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename)); 1919 ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr); 1920 if (size > 1) { 1921 ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... 
%s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr); 1922 } else { 1923 ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr); 1924 } 1925 } 1926 PetscFunctionReturn(0); 1927 } 1928 1929 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B) 1930 { 1931 hypre_ParCSRMatrix *parcsr; 1932 PetscErrorCode ierr; 1933 PetscCopyMode cpmode; 1934 1935 PetscFunctionBegin; 1936 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1937 if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) { 1938 parcsr = hypre_ParCSRMatrixClone(parcsr,0); 1939 cpmode = PETSC_OWN_POINTER; 1940 } else { 1941 cpmode = PETSC_COPY_VALUES; 1942 } 1943 ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr); 1944 PetscFunctionReturn(0); 1945 } 1946 1947 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str) 1948 { 1949 hypre_ParCSRMatrix *acsr,*bcsr; 1950 PetscErrorCode ierr; 1951 1952 PetscFunctionBegin; 1953 if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) { 1954 ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr); 1955 ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr); 1956 PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1)); 1957 ierr = MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 1958 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1959 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1960 } else { 1961 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1962 } 1963 PetscFunctionReturn(0); 1964 } 1965 1966 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d) 1967 { 1968 hypre_ParCSRMatrix *parcsr; 1969 hypre_CSRMatrix *dmat; 1970 HYPRE_Complex *a; 1971 HYPRE_Complex *data = NULL; 1972 HYPRE_Int *diag = NULL; 1973 PetscInt i; 1974 PetscBool cong; 1975 PetscErrorCode ierr; 1976 1977 PetscFunctionBegin; 1978 ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr); 1979 if (!cong) 
SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
  if (PetscDefined(USE_DEBUG)) {
    PetscBool miss;
    ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
    ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
    diag = hypre_CSRMatrixI(dmat);
    data = hypre_CSRMatrixData(dmat);
    /* NOTE(review): assumes the diagonal entry is the FIRST stored entry of each
       row (data[diag[i]]); hypre keeps the diagonal first in the diag block of an
       assembled ParCSR matrix -- verify if this ever changes upstream */
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscblaslapack.h>

/* Y = Y + a*X.  With identical patterns the stored value arrays are added
   directly with BLAS axpy; otherwise fall back to generic implementations */
static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    hypre_ParCSRMatrix *x,*y;
    hypre_CSRMatrix    *xloc,*yloc;
    PetscInt           xnnz,ynnz;
    HYPRE_Complex      *xarr,*yarr;
    PetscBLASInt       one=1,bnz;

    ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr);
    ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr);

    /* diagonal block */
    xloc = hypre_ParCSRMatrixDiag(x);
    yloc = hypre_ParCSRMatrixDiag(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    /* same pattern implies identical nonzero counts in each block */
    if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
    ierr =
PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));

    /* off-diagonal block: same element-wise axpy on the offd value arrays */
    xloc = hypre_ParCSRMatrixOffd(x);
    yloc = hypre_ParCSRMatrixOffd(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
    ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
  } else if (str == SUBSET_NONZERO_PATTERN) {
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  } else {
    Mat B;

    /* unknown pattern relation: preallocate the union pattern, add into it, and
       replace Y's internals with the result */
    ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr);
    ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
    ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
   based on the hypre IJ interface.

   Level: intermediate

.seealso: MatCreate()
M*/

PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(B,&hB);CHKERRQ(ierr);

  hB->inner_free  = PETSC_TRUE;   /* destroy the inner hypre matrix with the Mat */
  hB->available   = PETSC_TRUE;
  hB->sorted_full = PETSC_FALSE;  /* no assumption whether column indices are sorted or not */
  hB->size        = 0;
  hB->array       = NULL;

  B->data      = (void*)hB;
  B->rmap->bs  = 1;
  B->assembled = PETSC_FALSE;

  /* install the MATHYPRE implementations of the Mat operations */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* private communicator duplicate: hypre calls must not race with user MPI traffic */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr =
PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  /* compose type-conversion, product, preallocation, and accessor methods so
     they can be dispatched by name (see PetscUseMethod in MatHYPREGetParCSR) */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_seqaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_mpiaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Frees a hypre-allocated host array; used as a destructor callback for arrays
   whose ownership was transferred out of hypre */
static PetscErrorCode hypre_array_destroy(void *ptr)
{
  PetscFunctionBegin;
  hypre_TFree(ptr,HYPRE_MEMORY_HOST);
  PetscFunctionReturn(0);
}