/*
    Creates hypre ijmatrix from PETSc matrix
*/

#include <petscpkg_version.h>
#include <petsc/private/petschypre.h>
#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>
#include <_hypre_sstruct_ls.h>

/* older hypre only provides hypre_ParCSRMatrixCompleteClone(); map the newer
   two-argument name onto it (the second argument is ignored) */
#if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
#define hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A)
#endif

static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
static PetscErrorCode hypre_array_destroy(void*);
PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);

/*
   MatHYPRE_IJMatrixPreallocate - set exact per-row nonzero counts for the diagonal (A_d)
   and off-diagonal (A_o) blocks on the hypre IJ matrix so hypre allocates exact storage.
   Either matrix argument may be NULL (A_o is NULL for sequential matrices).
*/
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscErrorCode ierr;
  PetscInt       i,n_d,n_o;
  const PetscInt *ia_d,*ia_o;
  PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
  HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr);
    if (done_d) {
      ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_d[i] = ia_d[i+1] - ia_d[i]; /* row length from CSR row offsets */
      }
    }
    ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr);
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
    if (done_o) {
      ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_o; i++) {
        nnz_o[i] = ia_o[i+1] - ia_o[i];
      }
    }
    ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr);
  }
  if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part: the offd counts are all zero */
      ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr);
      for (i=0; i<n_d; i++) {
        nnz_o[i] = 0;
      }
    }
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
    { /* If we don't do this, the columns of the matrix will be all zeros! */
      hypre_AuxParCSRMatrix *aux_matrix;
      aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixDestroy(aux_matrix);
      hypre_IJMatrixTranslator(ij) = NULL;
      PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
      /* SetDiagOffdSizes rebuilt the translator; refetch it and request aux assembly */
      aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
    }
#else
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
#endif
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   MatHYPRE_CreateFromMat - create hA->ij with the parallel layout of A and, when A is one
   of the known AIJ/BAIJ types, preallocate it exactly; values are not copied here.
*/
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,cstart,cend;

  PetscFunctionBegin;
  ierr   = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr   = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* hypre takes inclusive [start,end] ownership ranges, hence the -1 */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr =
MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
  PetscFunctionReturn(0);
}

/*
   MatHYPRE_IJMatrixCopy - copy the values of A into the (already preallocated) IJ matrix.
   memcpy-based fast paths are used for square MPIAIJ and for SeqAIJ; any other case goes
   through the generic row-by-row MatGetRow()/HYPRE_IJMatrixSetValues() path.
*/
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  if (flg && nr == nc) { /* the MPIAIJ fast path is only valid for square matrices */
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* generic path: feed hypre one row at a time */
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    if (ncols) {
      HYPRE_Int nc = (HYPRE_Int)ncols; /* note: intentionally shadows the global column count above */

      /* guard against HYPRE_Int being narrower than PetscInt */
      if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
    }
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
   MatHYPRE_IJMatrixFastCopy_SeqAIJ - copy a SeqAIJ matrix into the IJ matrix by writing
   hypre's diagonal-block CSR arrays directly (no MatSetValues traffic).
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr);
    ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr);
  } else {
    PetscInt i;

    /* PetscInt and HYPRE_Int differ in size: convert entry by entry */
    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);

  /* CSR was filled directly, so no auxiliary assembly structure is needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}

/*
   MatHYPRE_IJMatrixFastCopy_MPIAIJ - copy a square MPIAIJ matrix into the IJ matrix by
   writing hypre's diag/offd CSR arrays directly.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);
  if (sameint) {
    ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
  jj = (PetscInt*) hoffd->big_j;
#else
  jj = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; /* garray maps local offd column -> global column */

  ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr);

  /* CSR was filled directly, so no auxiliary assembly structure is needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}

/*
   MatConvert_HYPRE_IS - convert a MATHYPRE matrix to MATIS, building local-to-global maps
   from the hypre ParCSR row/column partition and merging the diag/offd CSR blocks into a
   single local SeqAIJ matrix.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE              *mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes, starting indices and CSR arrays out of the hypre object */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   =
hypre_CSRMatrixData(hoffd); 306 if (reuse != MAT_REUSE_MATRIX) { 307 PetscInt *aux; 308 309 /* generate l2g maps for rows and cols */ 310 ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr); 311 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 312 ierr = ISDestroy(&is);CHKERRQ(ierr); 313 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA); 314 ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr); 315 for (i=0; i<dc; i++) aux[i] = i+stc; 316 for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i]; 317 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 318 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 319 ierr = ISDestroy(&is);CHKERRQ(ierr); 320 /* create MATIS object */ 321 ierr = MatCreate(comm,B);CHKERRQ(ierr); 322 ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr); 323 ierr = MatSetType(*B,MATIS);CHKERRQ(ierr); 324 ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr); 325 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 326 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 327 328 /* allocate CSR for local matrix */ 329 ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr); 330 ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr); 331 ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr); 332 } else { 333 PetscInt nr; 334 PetscBool done; 335 ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr); 336 ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr); 337 if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr); 338 if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! 
reuse %D requested %D",iptr[nr],nnz); 339 ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr); 340 } 341 /* merge local matrices */ 342 ii = iptr; 343 jj = jptr; 344 aa = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 345 *ii = *(hdi++) + *(hoi++); 346 for (jd=0,jo=0,cum=0; *ii<nnz; cum++) { 347 PetscScalar *aold = (PetscScalar*)aa; 348 PetscInt *jold = jj,nc = jd+jo; 349 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; } 350 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; } 351 *(++ii) = *(hdi++) + *(hoi++); 352 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr); 353 } 354 for (; cum<dr; cum++) *(++ii) = nnz; 355 if (reuse != MAT_REUSE_MATRIX) { 356 Mat_SeqAIJ* a; 357 358 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr); 359 ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr); 360 /* hack SeqAIJ */ 361 a = (Mat_SeqAIJ*)(lA->data); 362 a->free_a = PETSC_TRUE; 363 a->free_ij = PETSC_TRUE; 364 ierr = MatDestroy(&lA);CHKERRQ(ierr); 365 } 366 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 367 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 368 if (reuse == MAT_INPLACE_MATRIX) { 369 ierr = MatHeaderReplace(A,B);CHKERRQ(ierr); 370 } 371 PetscFunctionReturn(0); 372 } 373 374 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B) 375 { 376 Mat M = NULL; 377 Mat_HYPRE *hB; 378 MPI_Comm comm = PetscObjectComm((PetscObject)A); 379 PetscErrorCode ierr; 380 381 PetscFunctionBegin; 382 if (reuse == MAT_REUSE_MATRIX) { 383 /* always destroy the old matrix and create a new memory; 384 hope this does not churn the memory too much. The problem 385 is I do not know if it is possible to put the matrix back to 386 its initial state so that we can directly copy the values 387 the second time through. 
*/ 388 hB = (Mat_HYPRE*)((*B)->data); 389 PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij)); 390 } else { 391 ierr = MatCreate(comm,&M);CHKERRQ(ierr); 392 ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr); 393 ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 394 hB = (Mat_HYPRE*)(M->data); 395 if (reuse == MAT_INITIAL_MATRIX) *B = M; 396 } 397 ierr = MatSetOption(*B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 398 ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr); 399 ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr); 400 if (reuse == MAT_INPLACE_MATRIX) { 401 ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr); 402 } 403 (*B)->preallocated = PETSC_TRUE; 404 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 405 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 406 PetscFunctionReturn(0); 407 } 408 409 static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B) 410 { 411 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 412 hypre_ParCSRMatrix *parcsr; 413 hypre_CSRMatrix *hdiag,*hoffd; 414 MPI_Comm comm; 415 PetscScalar *da,*oa,*aptr; 416 PetscInt *dii,*djj,*oii,*ojj,*iptr; 417 PetscInt i,dnnz,onnz,m,n; 418 HYPRE_Int type; 419 PetscMPIInt size; 420 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 421 PetscErrorCode ierr; 422 423 PetscFunctionBegin; 424 comm = PetscObjectComm((PetscObject)A); 425 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 426 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 427 if (reuse == MAT_REUSE_MATRIX) { 428 PetscBool ismpiaij,isseqaij; 429 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 430 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 431 if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported"); 432 } 433 ierr = 
MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the CSR arrays for the diagonal block: fresh allocations, the arrays of the
     matrix being reused, or (inplace, same integer size) hypre's own arrays */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
      ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    } else {
      /* PetscInt and HYPRE_Int are the same size: steal hypre's i/j arrays */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  if (!sameint) {
    for (i=0;i<m+1;i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr);
    ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr);
  }
  ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr);
  /* AIJ requires sorted column indices within each row */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* same three-way reuse logic as above, for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
        ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (!sameint) {
      for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
    } else {
      ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr);
    }
    /* map offd column indices to global numbering through hypre's column map */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the CSR arrays to *B */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    /* uniprocessor case: no off-diagonal block */
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the CSR arrays to *B */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt   nh;
    void       *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when sameint the i/j arrays were stolen from hypre too; otherwise only the data arrays were */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/*
   MatAIJGetParCSR_Private - wrap an (MPI/Seq)AIJ matrix in a temporary hypre ParCSR matrix
   that shares (does not copy) the AIJ data arrays whenever the integer sizes allow it.
   Must be matched by a call to MatAIJRestoreParCSR_Private().
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    /* assumed partition: hypre only wants this rank's [start,end) pair */
    PetscMPIInt myid;

    ierr       = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA =
hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
  /* the range arrays belong to PETSc: hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (sameint) { /* reuse CSR pointers */
    hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
    hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
  } else { /* malloc CSR pointers */
    HYPRE_Int *hi,*hj;

    ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr);
    for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
    for (i = 0; i < dnnz; i++) hj[i] = (HYPRE_Int)(diag->j[i]);
    hypre_CSRMatrixI(hdiag) = hi;
    hypre_CSRMatrixJ(hdiag) = hj;
  }
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0); /* values still belong to the AIJ matrix */

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (sameint) { /* reuse CSR pointers */
      hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
      hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
    } else { /* malloc CSR pointers */
      HYPRE_Int *hi,*hj;

      ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr);
      for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
      for (i = 0; i < onnz; i++) hj[i] = (HYPRE_Int)(offd->j[i]);
      hypre_CSRMatrixI(hoffd) = hi;
      hypre_CSRMatrixJ(hoffd) = hj;
    }
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}

/*
   MatAIJRestoreParCSR_Private - undo MatAIJGetParCSR_Private(): free any integer arrays
   allocated for the size-mismatch case, then destroy the ParCSR wrapper without letting
   hypre touch the borrowed AIJ arrays.
*/
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (hoffd) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA so hypre does not free the borrowed arrays */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember ownership flags so they can be restored after hypre steals them */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts)
hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}

/* MatPtAPNumeric_AIJ_AIJ_wHYPRE - compute C = P^T*A*P for AIJ matrices via hypre's RAP */
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;
  Mat_Product        *product=C->product;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);

  /* MatHeaderMerge clears C->product; it was saved above and is put back here */
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  C->product = product;

  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MatPtAPSymbolic_AIJ_AIJ_wHYPRE - symbolic phase only installs the numeric callbacks */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}

/* MatPtAPNumeric_AIJ_HYPRE - compute C = P^T*A*P with AIJ A and MATHYPRE P */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MatPtAPNumeric_HYPRE_HYPRE - compute C = P^T*A*P with both A and P of type MATHYPRE */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses
hypre_ParCSRMatrixCreate with the communicator of hA
   hypre_ParCSRMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* push/pop the hypre routine name so PETSc error tracebacks attribute failures correctly */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* C = A * B for AIJ input matrices, computed through hypre_ParMatmul */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);

  /* merge the temporary result into C, preserving the product info across the merge */
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  C->product = product;

  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase only sets type and callbacks; hypre does all the work
   in the numeric phase, so "fill" is unused */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* C = A * B with both A and B of type MATHYPRE; result is MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;
  Mat_Product        *product;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  C->product = product;
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* D = A^T * B * C for AIJ matrices, via hypre's RAP kernel
   (A plays the role of R, C the role of P) */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase only sets the output type; all work happens in the numeric phase */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------- */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* set up the C = A*B product: only handled here when A is a MATHYPRE matrix */
static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)product->A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}

/* set up the C = PtAP product; the output type of C is selectable at runtime
   when A is AIJ (see the -mat*_hypre_outtype options below) */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      flg;
  PetscInt       type = 0;                /* default output type: "aij" */
  const char     *outTypes[4] = {"aij","seqaij","mpiaij","hypre"};
  PetscInt       ntype = 4;
  Mat            A = product->A;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatPtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matptap_hypre_outtype","MatPtAP outtype","MatPtAP outtype",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  } else {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatProduct_PtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matproduct_ptap_hypre_outtype","MatProduct_PtAP outtype","MatProduct_PtAP",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  }

  /* types 0-2 are all AIJ flavors; type 3 is hypre */
  if (type == 0 || type == 1 || type == 2) {
    ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  } else if (type == 3) {
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(0);
}

/* dispatch MatProduct setup according to the requested product type */
static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;

  PetscFunctionBegin;
  switch (product->type) {
  case MATPRODUCT_AB:
    ierr = MatProductSetFromOptions_HYPRE_AB(C);CHKERRQ(ierr);
    break;
  case MATPRODUCT_PtAP:
    ierr = MatProductSetFromOptions_HYPRE_PtAP(C);CHKERRQ(ierr);
    break;
  default:
break;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------- */

static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* y = A^T x */
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* y = A x */
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* z = A x + y: copy y into z first so the kernel can accumulate in place */
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* z = A^T x + y: copy y into z first so the kernel can accumulate in place */
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans
   The PETSc vector arrays are temporarily swapped into the cached hypre
   work vectors hA->x / hA->b (saving the old pointers in sax/say), the hypre
   matvec is performed, and then the original pointers are restored. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  if (trans) {
    /* transpose product: the roles of the work vectors are swapped --
       hA->x carries the output array, hA->b the input array */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* release all resources held by the MATHYPRE context */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if we do not own the inner ParCSR object, detach it before destroying the IJ wrapper */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); }

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  /* remove the composed methods installed at creation */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr =
PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* default setup: preallocate with default row sizes */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* finalize assembly: flush stashed off-process entries via MatSetValues_HYPRE,
   assemble the hypre IJ matrix, recreate its translator if needed, and lazily
   create the work vectors used by the mult kernels */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscScalar    *val;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* drain the stash: each message carries entries destined for this process */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
  }
  if (hA->x) PetscFunctionReturn(0);
  /* lazily create the hypre work vectors; their arrays are swapped at each
     product (cf. MatHYPRE_MultKernel_Private), hence the NULL arrays here */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* borrow the scratch buffer stored in the Mat_HYPRE context, growing it if
   needed; "size" is in bytes. Only one borrower at a time is allowed
   (guarded by hA->available); return it with MatRestoreArray_HYPRE */
static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

  if (hA->size >= size) {
    *array = hA->array;
  } else {
    /* grow: the old contents need not be preserved */
    ierr = PetscFree(hA->array);CHKERRQ(ierr);
    hA->size = size;
    ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
    *array = hA->array;
  }

  hA->available = PETSC_FALSE;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void
**array) 1262 { 1263 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1264 1265 PetscFunctionBegin; 1266 *array = NULL; 1267 hA->available = PETSC_TRUE; 1268 PetscFunctionReturn(0); 1269 } 1270 1271 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins) 1272 { 1273 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1274 PetscScalar *vals = (PetscScalar *)v; 1275 HYPRE_Complex *sscr; 1276 PetscInt *cscr[2]; 1277 PetscInt i,nzc; 1278 void *array = NULL; 1279 PetscErrorCode ierr; 1280 1281 PetscFunctionBegin; 1282 ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr); 1283 cscr[0] = (PetscInt*)array; 1284 cscr[1] = ((PetscInt*)array)+nc; 1285 sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2); 1286 for (i=0,nzc=0;i<nc;i++) { 1287 if (cols[i] >= 0) { 1288 cscr[0][nzc ] = cols[i]; 1289 cscr[1][nzc++] = i; 1290 } 1291 } 1292 if (!nzc) { 1293 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1294 PetscFunctionReturn(0); 1295 } 1296 1297 if (ins == ADD_VALUES) { 1298 for (i=0;i<nr;i++) { 1299 if (rows[i] >= 0 && nzc) { 1300 PetscInt j; 1301 HYPRE_Int hnc = (HYPRE_Int)nzc; 1302 1303 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]); 1304 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1305 PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1306 } 1307 vals += nc; 1308 } 1309 } else { /* INSERT_VALUES */ 1310 PetscInt rst,ren; 1311 1312 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1313 for (i=0;i<nr;i++) { 1314 if (rows[i] >= 0 && nzc) { 1315 PetscInt j; 1316 HYPRE_Int hnc = (HYPRE_Int)nzc; 1317 1318 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! 
number of columns %D for row %D",nzc,rows[i]); 1319 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1320 /* nonlocal values */ 1321 if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); } 1322 /* local values */ 1323 else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1324 } 1325 vals += nc; 1326 } 1327 } 1328 1329 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1330 PetscFunctionReturn(0); 1331 } 1332 1333 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1334 { 1335 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1336 HYPRE_Int *hdnnz,*honnz; 1337 PetscInt i,rs,re,cs,ce,bs; 1338 PetscMPIInt size; 1339 PetscErrorCode ierr; 1340 1341 PetscFunctionBegin; 1342 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1343 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1344 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1345 rs = A->rmap->rstart; 1346 re = A->rmap->rend; 1347 cs = A->cmap->rstart; 1348 ce = A->cmap->rend; 1349 if (!hA->ij) { 1350 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij)); 1351 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 1352 } else { 1353 HYPRE_BigInt hrs,hre,hcs,hce; 1354 PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce)); 1355 if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re); 1356 if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce); 1357 } 1358 if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs; 1359 if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs; 1360 1361 if (!dnnz) { 1362 ierr = 
PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else {
      honnz = (HYPRE_Int*)onnz;
    }
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    /* sequential case: no off-diagonal block */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  /* free the count arrays only when they were allocated here */
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }

  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A    - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   Notes:
    If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  parcsr   - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options

   Output Parameter:
.  A  - the matrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: ranges are half-open [rstart,rend) */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: with PETSC_COPY_VALUES and a MATHYPRE
     target we clone the structure and copy the values, then own the clone */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* type-specific backend for MatHYPREGetParCSR: validates the IJ matrix and
   returns the underlying ParCSR pointer (no copy, no ownership transfer) */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}

/*
   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

   Not collective

   Input Parameters:
+  A  - the MATHYPRE object

   Output Parameter:
.  parcsr  - the pointer to the hypre_ParCSRMatrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* scan the local diagonal block for rows without a stored diagonal entry;
   reports the first missing one (global index) and returns early */
static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscInt           rst;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
  ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  if (missing) *missing = PETSC_FALSE;
  if (dd) *dd = -1;
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt  size,i;
    HYPRE_Int *ii,*jj;

    size = hypre_CSRMatrixNumRows(ha);
    ii   = hypre_CSRMatrixI(ha);
    jj   = hypre_CSRMatrixJ(ha);
    for (i = 0; i < size; i++) {
      PetscInt  j;
      PetscBool found = PETSC_FALSE;

      /* in the diag block local column index == local row index on the diagonal */
      for (j = ii[i]; j < ii[i+1] && !found; j++)
        found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

      if (!found) {
        PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
        if (missing) *missing = PETSC_TRUE;
        if (dd) *dd = i+rst;
        PetscFunctionReturn(0);
      }
    }
    if (!size) {
      PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
      if (missing) *missing = PETSC_TRUE;
      if (dd) *dd = rst;
    }
  } else {
    PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
    if (missing) *missing = PETSC_TRUE;
    if (dd) *dd = rst;
  }
  PetscFunctionReturn(0);
}

/* A <- s * A: scale all stored values of both CSR blocks in place */
static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscErrorCode     ierr;
  HYPRE_Complex      hs;

  PetscFunctionBegin;
  ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    /* ii[size] is the total number of stored nonzeros in this block */
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  PetscFunctionReturn(0);
}

/* zero the given (local) rows and the corresponding columns via hypre's
   eliminate kernel; x/b updates are not implemented */
static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          *lrows;
  PetscInt           rst,ren,i;
  PetscErrorCode     ierr;
PetscFunctionBegin; 1714 if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented"); 1715 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1716 ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr); 1717 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1718 for (i=0;i<numRows;i++) { 1719 if (rows[i] < rst || rows[i] >= ren) 1720 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported"); 1721 lrows[i] = rows[i] - rst; 1722 } 1723 PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows)); 1724 ierr = PetscFree(lrows);CHKERRQ(ierr); 1725 PetscFunctionReturn(0); 1726 } 1727 1728 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha) 1729 { 1730 PetscErrorCode ierr; 1731 1732 PetscFunctionBegin; 1733 if (ha) { 1734 HYPRE_Int *ii, size; 1735 HYPRE_Complex *a; 1736 1737 size = hypre_CSRMatrixNumRows(ha); 1738 a = hypre_CSRMatrixData(ha); 1739 ii = hypre_CSRMatrixI(ha); 1740 1741 if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);} 1742 } 1743 PetscFunctionReturn(0); 1744 } 1745 1746 PetscErrorCode MatZeroEntries_HYPRE(Mat A) 1747 { 1748 hypre_ParCSRMatrix *parcsr; 1749 PetscErrorCode ierr; 1750 1751 PetscFunctionBegin; 1752 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1753 /* diagonal part */ 1754 ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr); 1755 /* off-diagonal part */ 1756 ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr); 1757 PetscFunctionReturn(0); 1758 } 1759 1760 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag) 1761 { 1762 PetscInt ii; 1763 HYPRE_Int *i, *j; 1764 HYPRE_Complex *a; 1765 1766 PetscFunctionBegin; 1767 if (!hA) PetscFunctionReturn(0); 1768 1769 i = hypre_CSRMatrixI(hA); 1770 j = hypre_CSRMatrixJ(hA); 1771 a = hypre_CSRMatrixData(hA); 1772 1773 for (ii = 0; ii < N; ii++) { 1774 HYPRE_Int jj, 
ibeg, iend, irow; 1775 1776 irow = rows[ii]; 1777 ibeg = i[irow]; 1778 iend = i[irow+1]; 1779 for (jj = ibeg; jj < iend; jj++) 1780 if (j[jj] == irow) a[jj] = diag; 1781 else a[jj] = 0.0; 1782 } 1783 PetscFunctionReturn(0); 1784 } 1785 1786 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 1787 { 1788 hypre_ParCSRMatrix *parcsr; 1789 PetscInt *lrows,len; 1790 HYPRE_Complex hdiag; 1791 PetscErrorCode ierr; 1792 1793 PetscFunctionBegin; 1794 if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size"); 1795 ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr); 1796 /* retrieve the internal matrix */ 1797 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1798 /* get locally owned rows */ 1799 ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr); 1800 /* zero diagonal part */ 1801 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr); 1802 /* zero off-diagonal part */ 1803 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr); 1804 1805 ierr = PetscFree(lrows);CHKERRQ(ierr); 1806 PetscFunctionReturn(0); 1807 } 1808 1809 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode) 1810 { 1811 PetscErrorCode ierr; 1812 1813 PetscFunctionBegin; 1814 if (mat->nooffprocentries) PetscFunctionReturn(0); 1815 1816 ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 1817 PetscFunctionReturn(0); 1818 } 1819 1820 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1821 { 1822 hypre_ParCSRMatrix *parcsr; 1823 HYPRE_Int hnz; 1824 PetscErrorCode ierr; 1825 1826 PetscFunctionBegin; 1827 /* retrieve the internal matrix */ 1828 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1829 /* call HYPRE API */ 1830 
PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1831 if (nz) *nz = (PetscInt)hnz; 1832 PetscFunctionReturn(0); 1833 } 1834 1835 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1836 { 1837 hypre_ParCSRMatrix *parcsr; 1838 HYPRE_Int hnz; 1839 PetscErrorCode ierr; 1840 1841 PetscFunctionBegin; 1842 /* retrieve the internal matrix */ 1843 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1844 /* call HYPRE API */ 1845 hnz = nz ? (HYPRE_Int)(*nz) : 0; 1846 PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1847 PetscFunctionReturn(0); 1848 } 1849 1850 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 1851 { 1852 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1853 PetscInt i; 1854 1855 PetscFunctionBegin; 1856 if (!m || !n) PetscFunctionReturn(0); 1857 /* Ignore negative row indices 1858 * And negative column indices should be automatically ignored in hypre 1859 * */ 1860 for (i=0; i<m; i++) { 1861 if (idxm[i] >= 0) { 1862 HYPRE_Int hn = (HYPRE_Int)n; 1863 PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n))); 1864 } 1865 } 1866 PetscFunctionReturn(0); 1867 } 1868 1869 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg) 1870 { 1871 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1872 1873 PetscFunctionBegin; 1874 switch (op) { 1875 case MAT_NO_OFF_PROC_ENTRIES: 1876 if (flg) { 1877 PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0)); 1878 } 1879 break; 1880 case MAT_SORTED_FULL: 1881 hA->sorted_full = flg; 1882 break; 1883 default: 1884 break; 1885 } 1886 PetscFunctionReturn(0); 1887 } 1888 1889 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view) 1890 { 1891 hypre_ParCSRMatrix *parcsr; 1892 PetscErrorCode 
ierr; 1893 Mat B; 1894 PetscViewerFormat format; 1895 PetscErrorCode (*mview)(Mat,PetscViewer) = NULL; 1896 1897 PetscFunctionBegin; 1898 ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr); 1899 if (format != PETSC_VIEWER_NATIVE) { 1900 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1901 ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr); 1902 ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr); 1903 if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr); 1904 ierr = (*mview)(B,view);CHKERRQ(ierr); 1905 ierr = MatDestroy(&B);CHKERRQ(ierr); 1906 } else { 1907 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1908 PetscMPIInt size; 1909 PetscBool isascii; 1910 const char *filename; 1911 1912 /* HYPRE uses only text files */ 1913 ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1914 if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name); 1915 ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr); 1916 PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename)); 1917 ierr = MPI_Comm_size(hA->comm,&size);CHKERRQ(ierr); 1918 if (size > 1) { 1919 ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... 
%s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr); 1920 } else { 1921 ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr); 1922 } 1923 } 1924 PetscFunctionReturn(0); 1925 } 1926 1927 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B) 1928 { 1929 hypre_ParCSRMatrix *parcsr; 1930 PetscErrorCode ierr; 1931 PetscCopyMode cpmode; 1932 1933 PetscFunctionBegin; 1934 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1935 if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) { 1936 parcsr = hypre_ParCSRMatrixClone(parcsr,0); 1937 cpmode = PETSC_OWN_POINTER; 1938 } else { 1939 cpmode = PETSC_COPY_VALUES; 1940 } 1941 ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr); 1942 PetscFunctionReturn(0); 1943 } 1944 1945 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str) 1946 { 1947 hypre_ParCSRMatrix *acsr,*bcsr; 1948 PetscErrorCode ierr; 1949 1950 PetscFunctionBegin; 1951 if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) { 1952 ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr); 1953 ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr); 1954 PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1)); 1955 ierr = MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 1956 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1957 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1958 } else { 1959 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1960 } 1961 PetscFunctionReturn(0); 1962 } 1963 1964 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d) 1965 { 1966 hypre_ParCSRMatrix *parcsr; 1967 hypre_CSRMatrix *dmat; 1968 HYPRE_Complex *a; 1969 HYPRE_Complex *data = NULL; 1970 HYPRE_Int *diag = NULL; 1971 PetscInt i; 1972 PetscBool cong; 1973 PetscErrorCode ierr; 1974 1975 PetscFunctionBegin; 1976 ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr); 1977 if (!cong) 
SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns"); 1978 if (PetscDefined(USE_DEBUG)) { 1979 PetscBool miss; 1980 ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr); 1981 if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing"); 1982 } 1983 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1984 dmat = hypre_ParCSRMatrixDiag(parcsr); 1985 if (dmat) { 1986 /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 1987 ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 1988 diag = hypre_CSRMatrixI(dmat); 1989 data = hypre_CSRMatrixData(dmat); 1990 for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]]; 1991 ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 1992 } 1993 PetscFunctionReturn(0); 1994 } 1995 1996 #include <petscblaslapack.h> 1997 1998 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str) 1999 { 2000 PetscErrorCode ierr; 2001 2002 PetscFunctionBegin; 2003 if (str == SAME_NONZERO_PATTERN) { 2004 hypre_ParCSRMatrix *x,*y; 2005 hypre_CSRMatrix *xloc,*yloc; 2006 PetscInt xnnz,ynnz; 2007 HYPRE_Complex *xarr,*yarr; 2008 PetscBLASInt one=1,bnz; 2009 2010 ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr); 2011 ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr); 2012 2013 /* diagonal block */ 2014 xloc = hypre_ParCSRMatrixDiag(x); 2015 yloc = hypre_ParCSRMatrixDiag(y); 2016 xnnz = 0; 2017 ynnz = 0; 2018 xarr = NULL; 2019 yarr = NULL; 2020 if (xloc) { 2021 xarr = hypre_CSRMatrixData(xloc); 2022 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 2023 } 2024 if (yloc) { 2025 yarr = hypre_CSRMatrixData(yloc); 2026 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 2027 } 2028 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz); 2029 ierr = 
PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 2030 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 2031 2032 /* off-diagonal block */ 2033 xloc = hypre_ParCSRMatrixOffd(x); 2034 yloc = hypre_ParCSRMatrixOffd(y); 2035 xnnz = 0; 2036 ynnz = 0; 2037 xarr = NULL; 2038 yarr = NULL; 2039 if (xloc) { 2040 xarr = hypre_CSRMatrixData(xloc); 2041 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 2042 } 2043 if (yloc) { 2044 yarr = hypre_CSRMatrixData(yloc); 2045 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 2046 } 2047 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz); 2048 ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 2049 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 2050 } else if (str == SUBSET_NONZERO_PATTERN) { 2051 ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr); 2052 } else { 2053 Mat B; 2054 2055 ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr); 2056 ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr); 2057 ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr); 2058 } 2059 PetscFunctionReturn(0); 2060 } 2061 2062 /*MC 2063 MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices 2064 based on the hypre IJ interface. 
   Level: intermediate

.seealso: MatCreate()
M*/

/*
   MatCreate_HYPRE - Constructor for the MATHYPRE type: allocates the Mat_HYPRE
   context, installs the operation table, and composes the type-specific methods.
   Invoked by MatSetType(B,MATHYPRE).
*/
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(B,&hB);CHKERRQ(ierr);
  hB->inner_free = PETSC_TRUE;   /* by default this object owns (and frees) the hypre IJ matrix */
  hB->available  = PETSC_TRUE;
  hB->sorted_full= PETSC_FALSE;  /* no assumption whether column indices are sorted or not */
  hB->size       = 0;
  hB->array      = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  /* clear the whole operation table first: only the entries set below are supported */
  ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  /* private communicator: hypre calls must not interfere with the user's communicator */
  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_seqaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_mpiaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Destructor callback for arrays allocated by hypre on the host heap. */
static PetscErrorCode hypre_array_destroy(void *ptr)
{
  PetscFunctionBegin;
  hypre_TFree(ptr,HYPRE_MEMORY_HOST);
  PetscFunctionReturn(0);
}