1 2 /* 3 Creates hypre ijmatrix from PETSc matrix 4 */ 5 6 #include <petscpkg_version.h> 7 #include <petsc/private/petschypre.h> 8 #include <petscmathypre.h> 9 #include <petsc/private/matimpl.h> 10 #include <../src/mat/impls/hypre/mhypre.h> 11 #include <../src/mat/impls/aij/mpi/mpiaij.h> 12 #include <../src/vec/vec/impls/hypre/vhyp.h> 13 #include <HYPRE.h> 14 #include <HYPRE_utilities.h> 15 #include <_hypre_parcsr_ls.h> 16 #include <_hypre_sstruct_ls.h> 17 18 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0) 19 #define hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A) 20 #endif 21 22 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*); 23 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix); 24 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix); 25 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix); 26 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool); 27 static PetscErrorCode hypre_array_destroy(void*); 28 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins); 29 30 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij) 31 { 32 PetscErrorCode ierr; 33 PetscInt i,n_d,n_o; 34 const PetscInt *ia_d,*ia_o; 35 PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE; 36 HYPRE_Int *nnz_d=NULL,*nnz_o=NULL; 37 38 PetscFunctionBegin; 39 if (A_d) { /* determine number of nonzero entries in local diagonal part */ 40 ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr); 41 if (done_d) { 42 ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr); 43 for (i=0; i<n_d; i++) { 44 nnz_d[i] = ia_d[i+1] - ia_d[i]; 45 } 46 } 47 ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr); 48 } 49 if (A_o) { /* determine number of nonzero entries in local off-diagonal part */ 50 ierr = 
MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 51 if (done_o) { 52 ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr); 53 for (i=0; i<n_o; i++) { 54 nnz_o[i] = ia_o[i+1] - ia_o[i]; 55 } 56 } 57 ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 58 } 59 if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */ 60 if (!done_o) { /* only diagonal part */ 61 ierr = PetscCalloc1(n_d,&nnz_o);CHKERRQ(ierr); 62 } 63 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 64 { /* If we don't do this, the columns of the matrix will be all zeros! */ 65 hypre_AuxParCSRMatrix *aux_matrix; 66 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 67 hypre_AuxParCSRMatrixDestroy(aux_matrix); 68 hypre_IJMatrixTranslator(ij) = NULL; 69 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 70 /* it seems they partially fixed it in 2.19.0 */ 71 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0) 72 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 73 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 74 #endif 75 } 76 #else 77 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 78 #endif 79 ierr = PetscFree(nnz_d);CHKERRQ(ierr); 80 ierr = PetscFree(nnz_o);CHKERRQ(ierr); 81 } 82 PetscFunctionReturn(0); 83 } 84 85 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA) 86 { 87 PetscErrorCode ierr; 88 PetscInt rstart,rend,cstart,cend; 89 90 PetscFunctionBegin; 91 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 92 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 93 rstart = A->rmap->rstart; 94 rend = A->rmap->rend; 95 cstart = A->cmap->rstart; 96 cend = A->cmap->rend; 97 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 98 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 99 { 100 PetscBool same; 101 Mat A_d,A_o; 102 const PetscInt *colmap; 103 ierr = 
PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr);
    /* dispatch on the base type of A: MPIAIJ/MPIBAIJ preallocate from both the
       diagonal and off-diagonal blocks, SEQAIJ/SEQBAIJ from the matrix itself */
    if (same) {
      ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr);
      ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr);
    if (same) {
      ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    /* any other type: fall through with no preallocation */
  }
  PetscFunctionReturn(0);
}

/* Copies the values of A into the hypre IJ matrix ij.  Square MPIAIJ and SeqAIJ
   matrices take a fast path that writes the hypre CSR arrays directly; every
   other type goes row-by-row through the HYPRE_IJMatrixSetValues interface. */
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr);
  /* fast path restricted to square MPIAIJ (the fast copy assumes the usual
     square column layout) */
  if (flg && nr == nc) {
    ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* generic (slow) path: insert one locally owned row at a time */
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
    if (ncols) {
      HYPRE_Int nc = (HYPRE_Int)ncols;

      /* guard against HYPRE_Int being narrower than PetscInt */
      if ((PetscInt)nc 
!= ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
    }
    ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Fast copy of a SeqAIJ matrix into the (already preallocated) hypre IJ matrix:
   writes the hypre diagonal CSR arrays directly instead of going through the
   IJ insertion interface.  Requires the IJ matrix to be of HYPRE_PARCSR type. */
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* can we memcpy index arrays? */

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr);
    ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr);
  } else {
    /* element-wise conversion when PetscInt and HYPRE_Int differ in width */
    PetscInt i;

    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);

  /* the CSR arrays are now filled; tell hypre not to rebuild them from the
     auxiliary (IJ insertion) data */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}

/* Fast copy of a square MPIAIJ matrix into the hypre IJ matrix: fills the hypre
   diagonal and off-diagonal CSR blocks directly, translating off-diagonal (and,
   for old hypre, diagonal) column indices back to global numbering. */
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);
  if (sameint) {
    ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
  jj = (PetscInt*) hoffd->big_j;
#else
  jj = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  /* garray maps local off-diagonal columns to global columns */
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

  ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr);

  /* CSR arrays filled directly; prevent hypre from rebuilding them */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(0);
}

/* Converts a MATHYPRE matrix A into a MATIS matrix *B: builds row/column
   local-to-global maps from the hypre partitioning and merges the hypre
   diagonal and off-diagonal CSR blocks into one local SeqAIJ matrix. */
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes, first row/column indices and the CSR arrays of both local blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = 
hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* columns: diagonal columns first (stc..stc+dc-1), then the off-diagonal
       columns taken from hypre's col_map_offd */
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse the CSR arrays of the existing local matrix after sanity checks */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: interleave each row's diagonal entries (local column
     indices kept) with its off-diagonal entries (shifted by dc), then sort each
     row's column indices together with the values */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = (PetscScalar*)aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad trailing empty rows, if any */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the arrays to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Converts an AIJ matrix A into a MATHYPRE matrix *B (initial, reuse or
   in-place), creating the hypre IJ matrix and copying the values over. */
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. 
*/
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)(M->data);
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  ierr = MatSetOption(*B,MAT_SORTED_FULL,PETSC_TRUE);CHKERRQ(ierr); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
  ierr = MatSetOption(*B,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr);
  }
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Converts a MATHYPRE matrix A into an AIJ matrix *B (initial, reuse or
   in-place), copying the hypre CSR blocks out, translating off-diagonal column
   indices to global numbering and sorting each row's column indices. */
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the diagonal-block arrays: freshly allocated (INITIAL), borrowed
     from the existing *B (REUSE), or aliased to the hypre arrays (INPLACE) */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
      ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    } else {
      /* alias hypre's arrays directly; ownership is sorted out below */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  if (!sameint) {
    for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr);
    ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr);
  }
  ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr);
  /* AIJ requires sorted column indices within each row; hypre does not */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* off-diagonal block, same three-way array-acquisition scheme as diag */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
        ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (!sameint) {
      for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
    } else {
      ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr);
    }
    /* translate local off-diagonal column indices to global numbering */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    /* sequential case: no off-diagonal block */
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt nh;
    void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when !sameint only the value arrays (da,oa) came from hypre */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* Wraps an AIJ matrix A in a temporary hypre ParCSR matrix *hA that borrows
   (or, when PetscInt and HYPRE_Int differ, copies) the AIJ CSR arrays.  Must
   be matched by MatAIJRestoreParCSR_Private. */
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    ierr       = MPI_Comm_rank(comm,&myid);CHKERRMPI(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = 
hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* the starts arrays belong to the PETSc layouts; hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (sameint) { /* reuse CSR pointers */
    hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
    hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
  } else { /* malloc CSR pointers */
    HYPRE_Int *hi,*hj;

    ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr);
    for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
    for (i = 0; i < dnnz; i++)         hj[i] = (HYPRE_Int)(diag->j[i]);
    hypre_CSRMatrixI(hdiag) = hi;
    hypre_CSRMatrixJ(hdiag) = hj;
  }
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  /* data is borrowed from the AIJ matrix; hypre must not free it */
  hypre_CSRMatrixSetDataOwner(hdiag,0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (sameint) { /* reuse CSR pointers */
      hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
      hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
    } else { /* malloc CSR pointers */
      HYPRE_Int *hi,*hj;

      ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr);
      for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
      for (i = 0; i < onnz; i++)         hj[i] = (HYPRE_Int)(offd->j[i]);
      hypre_CSRMatrixI(hoffd) = hi;
      hypre_CSRMatrixJ(hoffd) = hj;
    }
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    /* borrowed as well; reset to NULL before destruction in the Restore call */
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}

/* Releases a temporary ParCSR created by MatAIJGetParCSR_Private: frees only
   the index arrays PETSc allocated (the !sameint case), clears every borrowed
   pointer so hypre does not free PETSc-owned memory, then destroys the ParCSR. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (hoffd) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* remember ownership flags so they can be restored after the call */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals 
the col_starts from P and the row_starts from R */ 771 #if defined(hypre_ParCSRMatrixOwnsRowStarts) 772 hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0); 773 hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0); 774 if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1); 775 if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1); 776 #endif 777 PetscFunctionReturn(0); 778 } 779 780 static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C) 781 { 782 Mat B; 783 hypre_ParCSRMatrix *hA,*hP,*hPtAP; 784 PetscErrorCode ierr; 785 Mat_Product *product=C->product; 786 787 PetscFunctionBegin; 788 ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 789 ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr); 790 ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr); 791 ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr); 792 793 ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr); 794 C->product = product; 795 796 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 797 ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr); 798 PetscFunctionReturn(0); 799 } 800 801 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat C) 802 { 803 PetscErrorCode ierr; 804 805 PetscFunctionBegin; 806 ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr); 807 C->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE; 808 C->ops->productnumeric = MatProductNumeric_PtAP; 809 PetscFunctionReturn(0); 810 } 811 812 static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C) 813 { 814 Mat B; 815 Mat_HYPRE *hP; 816 hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr; 817 HYPRE_Int type; 818 MPI_Comm comm = PetscObjectComm((PetscObject)A); 819 PetscBool ishypre; 820 PetscErrorCode ierr; 821 822 PetscFunctionBegin; 823 ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr); 824 if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE); 825 hP = (Mat_HYPRE*)P->data; 826 
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* wrap A as ParCSR, run BoomerAMG's RAP, then release the temporary wrapper */
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Numeric C = P^T*A*P where both A and P are MATHYPRE (ParCSR) matrices. */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* Numeric C = A*B for AIJ inputs, computed by hypre on temporary ParCSR wrappers. */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;
  Mat_Product        *product=C->product;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);

  /* product was saved above and is restored after the merge */
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  C->product = product;

  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase: records callbacks only; `fill` is unused. */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

static PetscErrorCode
MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
/* Numeric C = A*B where both A and B are MATHYPRE (ParCSR) matrices. */
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;
  Mat_Product        *product;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  C->product = product;
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* Numeric D = A^T*B*C for AIJ inputs, via BoomerAMG's RAP with R = A and P = C. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Symbolic phase: only the output type is set; `fill` is unused. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------- */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* MatProduct AB dispatch: only installs HYPRE callbacks when A is MATHYPRE. */
static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)product->A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}

/* MatProduct PtAP dispatch: chooses AIJ or HYPRE output format for C = P^T*A*P
   (a runtime option selects the format when A is AIJ). */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      flg;
  PetscInt       type = 0;                                       /* default output format: "aij" */
  const char     *outTypes[4] = {"aij","seqaij","mpiaij","hypre"};
  PetscInt       ntype = 4;
  Mat            A = product->A;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatPtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matptap_hypre_outtype","MatPtAP outtype","MatPtAP outtype",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  } else {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatProduct_PtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matproduct_ptap_hypre_outtype","MatProduct_PtAP outtype","MatProduct_PtAP",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  }

  /* types 0-2 are all AIJ flavors, type 3 is MATHYPRE */
  if (type == 0 || type == 1 || type == 2) {
    ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  } else if (type == 3) {
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(0);
}

/* Top-level MatProduct dispatch for MATHYPRE: only AB and PtAP are handled. */
static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;

  PetscFunctionBegin;
  switch (product->type) {
  case MATPRODUCT_AB:
    ierr = MatProductSetFromOptions_HYPRE_AB(C);CHKERRQ(ierr);
    break;
  case MATPRODUCT_PtAP:
    ierr = MatProductSetFromOptions_HYPRE_PtAP(C);CHKERRQ(ierr);
    break;
  default:
    break;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------- */

/* y = A^T * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = A * x + y */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = A^T * x + y */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  /* temporarily swap the PETSc arrays into the cached hypre work vectors
     (sax/say hold the previous pointers so they can be restored after the matvec);
     for the transpose case the roles of the x/b work vectors are exchanged */
  if (trans) {
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Destroys a MATHYPRE matrix: work vectors, IJ matrix (detaching the inner ParCSR
   first when it is not owned), the duplicated communicator, stash, scratch array,
   and the composed method slots. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* detach the ParCSR object so HYPRE_IJMatrixDestroy does not free memory owned elsewhere */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) {ierr = MPI_Comm_free(&hA->comm);CHKERRMPI(ierr);}

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MatSetUp: falls back to default preallocation. */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Final assembly: flushes stashed off-process entries via MatSetValues_HYPRE,
   assembles the IJ matrix, recreates hypre's auxiliary translator, and lazily
   creates the cached work vectors used by the mult kernel. */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscScalar    *val;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we
     recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
#else
    PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize_v2,(aux_matrix,HYPRE_MEMORY_HOST));
#endif
  }
  /* lazily create the cached work vectors (array-less; the mult kernel swaps in user data) */
  if (hA->x) PetscFunctionReturn(0);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr);
  ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Returns a scratch buffer of at least `size` bytes (grow-only; reused across calls).
   Only one user at a time: guarded by hA->available. */
static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

  if (hA->size >= size) {
    *array = hA->array;
  } else {
    ierr = PetscFree(hA->array);CHKERRQ(ierr);
    hA->size = size;
    ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr);
    *array = hA->array;
  }

  hA->available = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* Releases the scratch buffer obtained with MatGetArray_HYPRE (memory is kept for reuse). */
static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

  PetscFunctionBegin;
  *array = NULL;
  hA->available = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* Inserts/adds values into the IJ matrix. Negative row/column indices are skipped;
   with INSERT_VALUES, rows outside the local ownership range are stashed for
   communication at assembly time. */
PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  HYPRE_Complex  *sscr;
  PetscInt       *cscr[2];
  PetscInt       i,nzc;
  void           *array = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scratch layout: cscr[0] = nc filtered column indices, cscr[1] = nc positions of
     those columns in the input row, sscr = converted values */
  ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* NOTE(review): the (HYPRE_BigInt*) casts below assume PetscInt and HYPRE_BigInt
     have the same size — presumably enforced by PETSc's hypre configure check; confirm */
  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]);CHKERRQ(ierr); }
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0 && nzc) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]);CHKERRQ(ierr); }
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Implementation behind MatHYPRESetPreallocation(): creates (or validates) the
   IJ matrix over the PETSc layouts and sets per-row diagonal/off-diagonal sizes. */
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  rs = A->rmap->rstart;
  re = A->rmap->rend;
  cs = A->cmap->rstart;
  ce = A->cmap->rend;
  if (!hA->ij) {
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    /* an IJ matrix already exists (e.g. from MatCreateFromParCSR): sanity-check its range */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRMPI(ierr);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else honnz = (HYPRE_Int*)onnz;
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }

  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A    - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.
   onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   Notes:
    If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  parcsr   - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options

   Output Parameter:
.  A  - the matrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: exclusive upper bounds */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: clone the structure, copy the values */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER; /* the clone is now ours to free */
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRMPI(ierr);
    }
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      /* borrow the starts from the PETSc layout (not owned by hypre) */
      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode
MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
/* Implementation behind MatHYPREGetParCSR(): returns the inner ParCSR object
   (borrowed pointer; no copy) after validating the IJ matrix type. */
{
  Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int type;

  PetscFunctionBegin;
  if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
  PetscFunctionReturn(0);
}

/*
   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

   Not collective

   Input Parameters:
+  A  - the MATHYPRE object

   Output Parameter:
.  parcsr  - the pointer to the hypre_ParCSRMatrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Scans the diagonal block of the ParCSR matrix for local rows that have no
   explicit diagonal entry; reports the first such (global) row in dd. */
static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscInt           rst;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
  ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  if (missing) *missing = PETSC_FALSE;
  if (dd) *dd = -1;
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt  size,i;
    HYPRE_Int *ii,*jj;

    size = hypre_CSRMatrixNumRows(ha);
    ii   = hypre_CSRMatrixI(ha);
    jj   = hypre_CSRMatrixJ(ha);
    for (i = 0; i < size; i++) {
      PetscInt  j;
      PetscBool found = PETSC_FALSE;

      for (j = ii[i]; j < ii[i+1] && !found; j++)
        found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

      if (!found) {
        PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
        if (missing) *missing = PETSC_TRUE;
        if (dd) *dd = i+rst;
        PetscFunctionReturn(0);
      }
    }
    if (!size) {
      PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
      if (missing) *missing = PETSC_TRUE;
      if (dd) *dd = rst;
    }
  } else {
    PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
    if (missing) *missing = PETSC_TRUE;
    if (dd) *dd = rst;
  }
  PetscFunctionReturn(0);
}

/* A *= s, applied directly to the stored values of the diag and offd CSR blocks. */
static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscErrorCode     ierr;
  HYPRE_Complex      hs;

  PetscFunctionBegin;
  ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr);
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs; /* ii[size] = total nnz of the block */
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  PetscFunctionReturn(0);
}

/* Eliminates the given (local-only) rows and columns via hypre; updating x/b and
   setting a diagonal value are not implemented. */
static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          *lrows;
  PetscInt           rst,ren,i;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr);
  for (i=0;i<numRows;i++) {
    /* NOTE(review): this SETERRQ returns without freeing lrows */
    if (rows[i] < rst || rows[i] >= ren)
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
    lrows[i] = rows[i] - rst;
  }
  PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Zeroes all stored values of one CSR block; the nonzero pattern is kept. */
static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ha) {
    HYPRE_Int     *ii, size;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);

    if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

/* MatZeroEntries: zero the values of both the diag and offd blocks. */
PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  hypre_ParCSRMatrix *parcsr;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr);
  /* diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),);CHKERRQ(ierr);
  /* off-diagonal part */
  ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Helper for MatZeroRows: operates on a single CSR block. (Body continues beyond
   this chunk of the file.) */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
{
  PetscInt      ii;
  HYPRE_Int     *i, *j;
  HYPRE_Complex *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(0);

  i = hypre_CSRMatrixI(hA);
  j = hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);
1785 1786 for (ii = 0; ii < N; ii++) { 1787 HYPRE_Int jj, ibeg, iend, irow; 1788 1789 irow = rows[ii]; 1790 ibeg = i[irow]; 1791 iend = i[irow+1]; 1792 for (jj = ibeg; jj < iend; jj++) 1793 if (j[jj] == irow) a[jj] = diag; 1794 else a[jj] = 0.0; 1795 } 1796 PetscFunctionReturn(0); 1797 } 1798 1799 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 1800 { 1801 hypre_ParCSRMatrix *parcsr; 1802 PetscInt *lrows,len; 1803 HYPRE_Complex hdiag; 1804 PetscErrorCode ierr; 1805 1806 PetscFunctionBegin; 1807 if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size"); 1808 ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr); 1809 /* retrieve the internal matrix */ 1810 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1811 /* get locally owned rows */ 1812 ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr); 1813 /* zero diagonal part */ 1814 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr); 1815 /* zero off-diagonal part */ 1816 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr); 1817 1818 ierr = PetscFree(lrows);CHKERRQ(ierr); 1819 PetscFunctionReturn(0); 1820 } 1821 1822 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode) 1823 { 1824 PetscErrorCode ierr; 1825 1826 PetscFunctionBegin; 1827 if (mat->nooffprocentries) PetscFunctionReturn(0); 1828 1829 ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 1830 PetscFunctionReturn(0); 1831 } 1832 1833 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1834 { 1835 hypre_ParCSRMatrix *parcsr; 1836 HYPRE_Int hnz; 1837 PetscErrorCode ierr; 1838 1839 PetscFunctionBegin; 1840 /* retrieve the internal matrix */ 1841 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1842 /* call HYPRE 
API */ 1843 PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1844 if (nz) *nz = (PetscInt)hnz; 1845 PetscFunctionReturn(0); 1846 } 1847 1848 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1849 { 1850 hypre_ParCSRMatrix *parcsr; 1851 HYPRE_Int hnz; 1852 PetscErrorCode ierr; 1853 1854 PetscFunctionBegin; 1855 /* retrieve the internal matrix */ 1856 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1857 /* call HYPRE API */ 1858 hnz = nz ? (HYPRE_Int)(*nz) : 0; 1859 PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1860 PetscFunctionReturn(0); 1861 } 1862 1863 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 1864 { 1865 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1866 PetscInt i; 1867 1868 PetscFunctionBegin; 1869 if (!m || !n) PetscFunctionReturn(0); 1870 /* Ignore negative row indices 1871 * And negative column indices should be automatically ignored in hypre 1872 * */ 1873 for (i=0; i<m; i++) { 1874 if (idxm[i] >= 0) { 1875 HYPRE_Int hn = (HYPRE_Int)n; 1876 PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n))); 1877 } 1878 } 1879 PetscFunctionReturn(0); 1880 } 1881 1882 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg) 1883 { 1884 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1885 1886 PetscFunctionBegin; 1887 switch (op) { 1888 case MAT_NO_OFF_PROC_ENTRIES: 1889 if (flg) { 1890 PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0)); 1891 } 1892 break; 1893 case MAT_SORTED_FULL: 1894 hA->sorted_full = flg; 1895 break; 1896 default: 1897 break; 1898 } 1899 PetscFunctionReturn(0); 1900 } 1901 1902 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view) 1903 { 1904 hypre_ParCSRMatrix *parcsr; 1905 
PetscErrorCode ierr; 1906 Mat B; 1907 PetscViewerFormat format; 1908 PetscErrorCode (*mview)(Mat,PetscViewer) = NULL; 1909 1910 PetscFunctionBegin; 1911 ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr); 1912 if (format != PETSC_VIEWER_NATIVE) { 1913 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1914 ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr); 1915 ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr); 1916 if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation"); 1917 ierr = (*mview)(B,view);CHKERRQ(ierr); 1918 ierr = MatDestroy(&B);CHKERRQ(ierr); 1919 } else { 1920 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1921 PetscMPIInt size; 1922 PetscBool isascii; 1923 const char *filename; 1924 1925 /* HYPRE uses only text files */ 1926 ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1927 if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name); 1928 ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr); 1929 PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename)); 1930 ierr = MPI_Comm_size(hA->comm,&size);CHKERRMPI(ierr); 1931 if (size > 1) { 1932 ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... 
%s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr); 1933 } else { 1934 ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr); 1935 } 1936 } 1937 PetscFunctionReturn(0); 1938 } 1939 1940 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B) 1941 { 1942 hypre_ParCSRMatrix *parcsr; 1943 PetscErrorCode ierr; 1944 PetscCopyMode cpmode; 1945 1946 PetscFunctionBegin; 1947 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1948 if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) { 1949 parcsr = hypre_ParCSRMatrixClone(parcsr,0); 1950 cpmode = PETSC_OWN_POINTER; 1951 } else { 1952 cpmode = PETSC_COPY_VALUES; 1953 } 1954 ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr); 1955 PetscFunctionReturn(0); 1956 } 1957 1958 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str) 1959 { 1960 hypre_ParCSRMatrix *acsr,*bcsr; 1961 PetscErrorCode ierr; 1962 1963 PetscFunctionBegin; 1964 if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) { 1965 ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr); 1966 ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr); 1967 PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1)); 1968 ierr = MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE);CHKERRQ(ierr); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 1969 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1970 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1971 } else { 1972 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1973 } 1974 PetscFunctionReturn(0); 1975 } 1976 1977 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d) 1978 { 1979 hypre_ParCSRMatrix *parcsr; 1980 hypre_CSRMatrix *dmat; 1981 HYPRE_Complex *a; 1982 HYPRE_Complex *data = NULL; 1983 HYPRE_Int *diag = NULL; 1984 PetscInt i; 1985 PetscBool cong; 1986 PetscErrorCode ierr; 1987 1988 PetscFunctionBegin; 1989 ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr); 
1990 if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns"); 1991 if (PetscDefined(USE_DEBUG)) { 1992 PetscBool miss; 1993 ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr); 1994 if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing"); 1995 } 1996 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1997 dmat = hypre_ParCSRMatrixDiag(parcsr); 1998 if (dmat) { 1999 /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 2000 ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 2001 diag = hypre_CSRMatrixI(dmat); 2002 data = hypre_CSRMatrixData(dmat); 2003 for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]]; 2004 ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 2005 } 2006 PetscFunctionReturn(0); 2007 } 2008 2009 #include <petscblaslapack.h> 2010 2011 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str) 2012 { 2013 PetscErrorCode ierr; 2014 2015 PetscFunctionBegin; 2016 if (str == SAME_NONZERO_PATTERN) { 2017 hypre_ParCSRMatrix *x,*y; 2018 hypre_CSRMatrix *xloc,*yloc; 2019 PetscInt xnnz,ynnz; 2020 HYPRE_Complex *xarr,*yarr; 2021 PetscBLASInt one=1,bnz; 2022 2023 ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr); 2024 ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr); 2025 2026 /* diagonal block */ 2027 xloc = hypre_ParCSRMatrixDiag(x); 2028 yloc = hypre_ParCSRMatrixDiag(y); 2029 xnnz = 0; 2030 ynnz = 0; 2031 xarr = NULL; 2032 yarr = NULL; 2033 if (xloc) { 2034 xarr = hypre_CSRMatrixData(xloc); 2035 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 2036 } 2037 if (yloc) { 2038 yarr = hypre_CSRMatrixData(yloc); 2039 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 2040 } 2041 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz); 2042 ierr = 
PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 2043 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 2044 2045 /* off-diagonal block */ 2046 xloc = hypre_ParCSRMatrixOffd(x); 2047 yloc = hypre_ParCSRMatrixOffd(y); 2048 xnnz = 0; 2049 ynnz = 0; 2050 xarr = NULL; 2051 yarr = NULL; 2052 if (xloc) { 2053 xarr = hypre_CSRMatrixData(xloc); 2054 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 2055 } 2056 if (yloc) { 2057 yarr = hypre_CSRMatrixData(yloc); 2058 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 2059 } 2060 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz); 2061 ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 2062 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 2063 } else if (str == SUBSET_NONZERO_PATTERN) { 2064 ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr); 2065 } else { 2066 Mat B; 2067 2068 ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr); 2069 ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr); 2070 ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr); 2071 } 2072 PetscFunctionReturn(0); 2073 } 2074 2075 /*MC 2076 MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices 2077 based on the hypre IJ interface. 
2078 2079 Level: intermediate 2080 2081 .seealso: MatCreate() 2082 M*/ 2083 2084 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B) 2085 { 2086 Mat_HYPRE *hB; 2087 PetscErrorCode ierr; 2088 2089 PetscFunctionBegin; 2090 ierr = PetscNewLog(B,&hB);CHKERRQ(ierr); 2091 hB->inner_free = PETSC_TRUE; 2092 hB->available = PETSC_TRUE; 2093 hB->sorted_full= PETSC_FALSE; /* no assumption whether column indices are sorted or not */ 2094 hB->size = 0; 2095 hB->array = NULL; 2096 2097 B->data = (void*)hB; 2098 B->rmap->bs = 1; 2099 B->assembled = PETSC_FALSE; 2100 2101 ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 2102 B->ops->mult = MatMult_HYPRE; 2103 B->ops->multtranspose = MatMultTranspose_HYPRE; 2104 B->ops->multadd = MatMultAdd_HYPRE; 2105 B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE; 2106 B->ops->setup = MatSetUp_HYPRE; 2107 B->ops->destroy = MatDestroy_HYPRE; 2108 B->ops->assemblyend = MatAssemblyEnd_HYPRE; 2109 B->ops->assemblybegin = MatAssemblyBegin_HYPRE; 2110 B->ops->setvalues = MatSetValues_HYPRE; 2111 B->ops->missingdiagonal = MatMissingDiagonal_HYPRE; 2112 B->ops->scale = MatScale_HYPRE; 2113 B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE; 2114 B->ops->zeroentries = MatZeroEntries_HYPRE; 2115 B->ops->zerorows = MatZeroRows_HYPRE; 2116 B->ops->getrow = MatGetRow_HYPRE; 2117 B->ops->restorerow = MatRestoreRow_HYPRE; 2118 B->ops->getvalues = MatGetValues_HYPRE; 2119 B->ops->setoption = MatSetOption_HYPRE; 2120 B->ops->duplicate = MatDuplicate_HYPRE; 2121 B->ops->copy = MatCopy_HYPRE; 2122 B->ops->view = MatView_HYPRE; 2123 B->ops->getdiagonal = MatGetDiagonal_HYPRE; 2124 B->ops->axpy = MatAXPY_HYPRE; 2125 B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE; 2126 2127 /* build cache for off array entries formed */ 2128 ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr); 2129 2130 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRMPI(ierr); 2131 ierr = 
PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr); 2132 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr); 2133 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr); 2134 ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_seqaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr); 2135 ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_mpiaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr); 2136 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr); 2137 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr); 2138 PetscFunctionReturn(0); 2139 } 2140 2141 static PetscErrorCode hypre_array_destroy(void *ptr) 2142 { 2143 PetscFunctionBegin; 2144 hypre_TFree(ptr,HYPRE_MEMORY_HOST); 2145 PetscFunctionReturn(0); 2146 } 2147