1 2 /* 3 Creates hypre ijmatrix from PETSc matrix 4 */ 5 6 #include <petscpkg_version.h> 7 #include <petsc/private/petschypre.h> 8 #include <petscmathypre.h> 9 #include <petsc/private/matimpl.h> 10 #include <../src/mat/impls/hypre/mhypre.h> 11 #include <../src/mat/impls/aij/mpi/mpiaij.h> 12 #include <../src/vec/vec/impls/hypre/vhyp.h> 13 #include <HYPRE.h> 14 #include <HYPRE_utilities.h> 15 #include <_hypre_parcsr_ls.h> 16 #include <_hypre_sstruct_ls.h> 17 18 #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0) 19 #define hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A) 20 #endif 21 22 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*); 23 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix); 24 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix); 25 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix); 26 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool); 27 static PetscErrorCode hypre_array_destroy(void*); 28 PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins); 29 30 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij) 31 { 32 PetscErrorCode ierr; 33 PetscInt i,n_d,n_o; 34 const PetscInt *ia_d,*ia_o; 35 PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE; 36 HYPRE_Int *nnz_d=NULL,*nnz_o=NULL; 37 38 PetscFunctionBegin; 39 if (A_d) { /* determine number of nonzero entries in local diagonal part */ 40 ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr); 41 if (done_d) { 42 ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr); 43 for (i=0; i<n_d; i++) { 44 nnz_d[i] = ia_d[i+1] - ia_d[i]; 45 } 46 } 47 ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr); 48 } 49 if (A_o) { /* determine number of nonzero entries in local off-diagonal part */ 50 ierr = 
MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 51 if (done_o) { 52 ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr); 53 for (i=0; i<n_o; i++) { 54 nnz_o[i] = ia_o[i+1] - ia_o[i]; 55 } 56 } 57 ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 58 } 59 if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */ 60 if (!done_o) { /* only diagonal part */ 61 ierr = PetscCalloc1(n_d,&nnz_o);CHKERRQ(ierr); 62 } 63 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 64 { /* If we don't do this, the columns of the matrix will be all zeros! */ 65 hypre_AuxParCSRMatrix *aux_matrix; 66 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 67 hypre_AuxParCSRMatrixDestroy(aux_matrix); 68 hypre_IJMatrixTranslator(ij) = NULL; 69 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 70 /* it seems they partially fixed it in 2.19.0 */ 71 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0) 72 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 73 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 74 #endif 75 } 76 #else 77 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o)); 78 #endif 79 ierr = PetscFree(nnz_d);CHKERRQ(ierr); 80 ierr = PetscFree(nnz_o);CHKERRQ(ierr); 81 } 82 PetscFunctionReturn(0); 83 } 84 85 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA) 86 { 87 PetscErrorCode ierr; 88 PetscInt rstart,rend,cstart,cend; 89 90 PetscFunctionBegin; 91 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 92 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 93 rstart = A->rmap->rstart; 94 rend = A->rmap->rend; 95 cstart = A->cmap->rstart; 96 cend = A->cmap->rend; 97 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 98 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 99 { 100 PetscBool same; 101 Mat A_d,A_o; 102 const PetscInt *colmap; 103 ierr = 
PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr); 104 if (same) { 105 ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 106 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 107 PetscFunctionReturn(0); 108 } 109 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr); 110 if (same) { 111 ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 112 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 113 PetscFunctionReturn(0); 114 } 115 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr); 116 if (same) { 117 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 118 PetscFunctionReturn(0); 119 } 120 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr); 121 if (same) { 122 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 } 126 PetscFunctionReturn(0); 127 } 128 129 static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij) 130 { 131 PetscErrorCode ierr; 132 PetscInt i,rstart,rend,ncols,nr,nc; 133 const PetscScalar *values; 134 const PetscInt *cols; 135 PetscBool flg; 136 137 PetscFunctionBegin; 138 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 139 ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr); 140 if (flg && nr == nc) { 141 ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr); 142 PetscFunctionReturn(0); 143 } 144 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 145 if (flg) { 146 ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr); 147 PetscFunctionReturn(0); 148 } 149 150 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 151 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 152 for (i=rstart; i<rend; i++) { 153 ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 154 if (ncols) { 155 HYPRE_Int nc = (HYPRE_Int)ncols; 156 157 if ((PetscInt)nc 
!= ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i); 158 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values)); 159 } 160 ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 161 } 162 PetscFunctionReturn(0); 163 } 164 165 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij) 166 { 167 PetscErrorCode ierr; 168 Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data; 169 HYPRE_Int type; 170 hypre_ParCSRMatrix *par_matrix; 171 hypre_AuxParCSRMatrix *aux_matrix; 172 hypre_CSRMatrix *hdiag; 173 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 174 175 PetscFunctionBegin; 176 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 177 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 178 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 179 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 180 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 181 /* 182 this is the Hack part where we monkey directly with the hypre datastructures 183 */ 184 if (sameint) { 185 ierr = PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);CHKERRQ(ierr); 186 ierr = PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);CHKERRQ(ierr); 187 } else { 188 PetscInt i; 189 190 for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i]; 191 for (i=0;i<pdiag->nz;i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i]; 192 } 193 ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr); 194 195 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 196 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 197 PetscFunctionReturn(0); 198 } 199 200 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij) 201 { 202 PetscErrorCode ierr; 203 Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data; 204 Mat_SeqAIJ *pdiag,*poffd; 205 PetscInt i,*garray = 
pA->garray,*jj,cstart,*pjj; 206 HYPRE_Int *hjj,type; 207 hypre_ParCSRMatrix *par_matrix; 208 hypre_AuxParCSRMatrix *aux_matrix; 209 hypre_CSRMatrix *hdiag,*hoffd; 210 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 211 212 PetscFunctionBegin; 213 pdiag = (Mat_SeqAIJ*) pA->A->data; 214 poffd = (Mat_SeqAIJ*) pA->B->data; 215 /* cstart is only valid for square MPIAIJ layed out in the usual way */ 216 ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr); 217 218 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 219 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 220 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 221 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 222 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 223 hoffd = hypre_ParCSRMatrixOffd(par_matrix); 224 225 /* 226 this is the Hack part where we monkey directly with the hypre datastructures 227 */ 228 if (sameint) { 229 ierr = PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);CHKERRQ(ierr); 230 } else { 231 for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]); 232 } 233 /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */ 234 hjj = hdiag->j; 235 pjj = pdiag->j; 236 #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 237 for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i]; 238 #else 239 for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i]; 240 #endif 241 ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr); 242 if (sameint) { 243 ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr); 244 } else { 245 for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]); 246 } 247 248 /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this 249 If we hacked a hypre a bit more we might be able to avoid this step */ 250 #if 
PETSC_PKG_HYPRE_VERSION_GE(2,16,0) 251 PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd)); 252 jj = (PetscInt*) hoffd->big_j; 253 #else 254 jj = (PetscInt*) hoffd->j; 255 #endif 256 pjj = poffd->j; 257 for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; 258 259 ierr = PetscArraycpy(hoffd->data,poffd->a,poffd->nz);CHKERRQ(ierr); 260 261 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 262 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 263 PetscFunctionReturn(0); 264 } 265 266 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B) 267 { 268 Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data); 269 Mat lA; 270 ISLocalToGlobalMapping rl2g,cl2g; 271 IS is; 272 hypre_ParCSRMatrix *hA; 273 hypre_CSRMatrix *hdiag,*hoffd; 274 MPI_Comm comm; 275 HYPRE_Complex *hdd,*hod,*aa; 276 PetscScalar *data; 277 HYPRE_BigInt *col_map_offd; 278 HYPRE_Int *hdi,*hdj,*hoi,*hoj; 279 PetscInt *ii,*jj,*iptr,*jptr; 280 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N; 281 HYPRE_Int type; 282 PetscErrorCode ierr; 283 284 PetscFunctionBegin; 285 comm = PetscObjectComm((PetscObject)A); 286 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type)); 287 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 288 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA)); 289 M = hypre_ParCSRMatrixGlobalNumRows(hA); 290 N = hypre_ParCSRMatrixGlobalNumCols(hA); 291 str = hypre_ParCSRMatrixFirstRowIndex(hA); 292 stc = hypre_ParCSRMatrixFirstColDiag(hA); 293 hdiag = hypre_ParCSRMatrixDiag(hA); 294 hoffd = hypre_ParCSRMatrixOffd(hA); 295 dr = hypre_CSRMatrixNumRows(hdiag); 296 dc = hypre_CSRMatrixNumCols(hdiag); 297 nnz = hypre_CSRMatrixNumNonzeros(hdiag); 298 hdi = hypre_CSRMatrixI(hdiag); 299 hdj = hypre_CSRMatrixJ(hdiag); 300 hdd = hypre_CSRMatrixData(hdiag); 301 oc = hypre_CSRMatrixNumCols(hoffd); 302 nnz += hypre_CSRMatrixNumNonzeros(hoffd); 303 hoi = hypre_CSRMatrixI(hoffd); 304 hoj = 
hypre_CSRMatrixJ(hoffd); 305 hod = hypre_CSRMatrixData(hoffd); 306 if (reuse != MAT_REUSE_MATRIX) { 307 PetscInt *aux; 308 309 /* generate l2g maps for rows and cols */ 310 ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr); 311 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 312 ierr = ISDestroy(&is);CHKERRQ(ierr); 313 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA); 314 ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr); 315 for (i=0; i<dc; i++) aux[i] = i+stc; 316 for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i]; 317 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 318 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 319 ierr = ISDestroy(&is);CHKERRQ(ierr); 320 /* create MATIS object */ 321 ierr = MatCreate(comm,B);CHKERRQ(ierr); 322 ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr); 323 ierr = MatSetType(*B,MATIS);CHKERRQ(ierr); 324 ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr); 325 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 326 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 327 328 /* allocate CSR for local matrix */ 329 ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr); 330 ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr); 331 ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr); 332 } else { 333 PetscInt nr; 334 PetscBool done; 335 ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr); 336 ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr); 337 if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr); 338 if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! 
reuse %D requested %D",iptr[nr],nnz); 339 ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr); 340 } 341 /* merge local matrices */ 342 ii = iptr; 343 jj = jptr; 344 aa = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 345 *ii = *(hdi++) + *(hoi++); 346 for (jd=0,jo=0,cum=0; *ii<nnz; cum++) { 347 PetscScalar *aold = (PetscScalar*)aa; 348 PetscInt *jold = jj,nc = jd+jo; 349 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; } 350 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; } 351 *(++ii) = *(hdi++) + *(hoi++); 352 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr); 353 } 354 for (; cum<dr; cum++) *(++ii) = nnz; 355 if (reuse != MAT_REUSE_MATRIX) { 356 Mat_SeqAIJ* a; 357 358 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr); 359 ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr); 360 /* hack SeqAIJ */ 361 a = (Mat_SeqAIJ*)(lA->data); 362 a->free_a = PETSC_TRUE; 363 a->free_ij = PETSC_TRUE; 364 ierr = MatDestroy(&lA);CHKERRQ(ierr); 365 } 366 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 367 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 368 if (reuse == MAT_INPLACE_MATRIX) { 369 ierr = MatHeaderReplace(A,B);CHKERRQ(ierr); 370 } 371 PetscFunctionReturn(0); 372 } 373 374 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B) 375 { 376 Mat M = NULL; 377 Mat_HYPRE *hB; 378 MPI_Comm comm = PetscObjectComm((PetscObject)A); 379 PetscErrorCode ierr; 380 381 PetscFunctionBegin; 382 if (reuse == MAT_REUSE_MATRIX) { 383 /* always destroy the old matrix and create a new memory; 384 hope this does not churn the memory too much. The problem 385 is I do not know if it is possible to put the matrix back to 386 its initial state so that we can directly copy the values 387 the second time through. 
*/ 388 hB = (Mat_HYPRE*)((*B)->data); 389 PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij)); 390 } else { 391 ierr = MatCreate(comm,&M);CHKERRQ(ierr); 392 ierr = MatSetType(M,MATHYPRE);CHKERRQ(ierr); 393 ierr = MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 394 hB = (Mat_HYPRE*)(M->data); 395 if (reuse == MAT_INITIAL_MATRIX) *B = M; 396 } 397 ierr = MatSetOption(*B,MAT_SORTED_FULL,PETSC_TRUE);CHKERRQ(ierr); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 398 ierr = MatSetOption(*B,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr); 399 ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr); 400 ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr); 401 if (reuse == MAT_INPLACE_MATRIX) { 402 ierr = MatHeaderReplace(A,&M);CHKERRQ(ierr); 403 } 404 (*B)->preallocated = PETSC_TRUE; 405 ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 406 ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 407 PetscFunctionReturn(0); 408 } 409 410 static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B) 411 { 412 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 413 hypre_ParCSRMatrix *parcsr; 414 hypre_CSRMatrix *hdiag,*hoffd; 415 MPI_Comm comm; 416 PetscScalar *da,*oa,*aptr; 417 PetscInt *dii,*djj,*oii,*ojj,*iptr; 418 PetscInt i,dnnz,onnz,m,n; 419 HYPRE_Int type; 420 PetscMPIInt size; 421 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 422 PetscErrorCode ierr; 423 424 PetscFunctionBegin; 425 comm = PetscObjectComm((PetscObject)A); 426 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 427 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 428 if (reuse == MAT_REUSE_MATRIX) { 429 PetscBool ismpiaij,isseqaij; 430 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 431 ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 432 if (!ismpiaij && !isseqaij) 
SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported"); 433 } 434 ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr); 435 436 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr)); 437 hdiag = hypre_ParCSRMatrixDiag(parcsr); 438 hoffd = hypre_ParCSRMatrixOffd(parcsr); 439 m = hypre_CSRMatrixNumRows(hdiag); 440 n = hypre_CSRMatrixNumCols(hdiag); 441 dnnz = hypre_CSRMatrixNumNonzeros(hdiag); 442 onnz = hypre_CSRMatrixNumNonzeros(hoffd); 443 if (reuse == MAT_INITIAL_MATRIX) { 444 ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr); 445 ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr); 446 ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr); 447 } else if (reuse == MAT_REUSE_MATRIX) { 448 PetscInt nr; 449 PetscBool done; 450 if (size > 1) { 451 Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data); 452 453 ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr); 454 if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m); 455 if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz); 456 ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr); 457 } else { 458 ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr); 459 if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m); 460 if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! 
reuse %D hypre %D",dii[nr],dnnz); 461 ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr); 462 } 463 } else { /* MAT_INPLACE_MATRIX */ 464 if (!sameint) { 465 ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr); 466 ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr); 467 } else { 468 dii = (PetscInt*)hypre_CSRMatrixI(hdiag); 469 djj = (PetscInt*)hypre_CSRMatrixJ(hdiag); 470 } 471 da = (PetscScalar*)hypre_CSRMatrixData(hdiag); 472 } 473 474 if (!sameint) { 475 for (i=0;i<m+1;i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]); 476 for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]); 477 } else { 478 ierr = PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);CHKERRQ(ierr); 479 ierr = PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);CHKERRQ(ierr); 480 } 481 ierr = PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);CHKERRQ(ierr); 482 iptr = djj; 483 aptr = da; 484 for (i=0; i<m; i++) { 485 PetscInt nc = dii[i+1]-dii[i]; 486 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr); 487 iptr += nc; 488 aptr += nc; 489 } 490 if (size > 1) { 491 HYPRE_BigInt *coffd; 492 HYPRE_Int *offdj; 493 494 if (reuse == MAT_INITIAL_MATRIX) { 495 ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr); 496 ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr); 497 ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr); 498 } else if (reuse == MAT_REUSE_MATRIX) { 499 Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data); 500 PetscInt nr,hr = hypre_CSRMatrixNumRows(hoffd); 501 PetscBool done; 502 503 ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr); 504 if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr); 505 if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! 
reuse %D hypre %D",oii[nr],onnz); 506 ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr); 507 } else { /* MAT_INPLACE_MATRIX */ 508 if (!sameint) { 509 ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr); 510 ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr); 511 } else { 512 oii = (PetscInt*)hypre_CSRMatrixI(hoffd); 513 ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd); 514 } 515 oa = (PetscScalar*)hypre_CSRMatrixData(hoffd); 516 } 517 if (!sameint) { 518 for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]); 519 } else { 520 ierr = PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);CHKERRQ(ierr); 521 } 522 offdj = hypre_CSRMatrixJ(hoffd); 523 coffd = hypre_ParCSRMatrixColMapOffd(parcsr); 524 for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]]; 525 ierr = PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);CHKERRQ(ierr); 526 iptr = ojj; 527 aptr = oa; 528 for (i=0; i<m; i++) { 529 PetscInt nc = oii[i+1]-oii[i]; 530 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr); 531 iptr += nc; 532 aptr += nc; 533 } 534 if (reuse == MAT_INITIAL_MATRIX) { 535 Mat_MPIAIJ *b; 536 Mat_SeqAIJ *d,*o; 537 538 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr); 539 /* hack MPIAIJ */ 540 b = (Mat_MPIAIJ*)((*B)->data); 541 d = (Mat_SeqAIJ*)b->A->data; 542 o = (Mat_SeqAIJ*)b->B->data; 543 d->free_a = PETSC_TRUE; 544 d->free_ij = PETSC_TRUE; 545 o->free_a = PETSC_TRUE; 546 o->free_ij = PETSC_TRUE; 547 } else if (reuse == MAT_INPLACE_MATRIX) { 548 Mat T; 549 550 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr); 551 if (sameint) { /* ownership of CSR pointers is transferred to PETSc */ 552 hypre_CSRMatrixI(hdiag) = NULL; 553 hypre_CSRMatrixJ(hdiag) = NULL; 554 hypre_CSRMatrixI(hoffd) = NULL; 555 hypre_CSRMatrixJ(hoffd) = NULL; 556 } else { /* Hack MPIAIJ -> free ij but not a */ 557 Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data); 558 Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data); 559 
Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data); 560 561 d->free_ij = PETSC_TRUE; 562 o->free_ij = PETSC_TRUE; 563 } 564 hypre_CSRMatrixData(hdiag) = NULL; 565 hypre_CSRMatrixData(hoffd) = NULL; 566 ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr); 567 } 568 } else { 569 oii = NULL; 570 ojj = NULL; 571 oa = NULL; 572 if (reuse == MAT_INITIAL_MATRIX) { 573 Mat_SeqAIJ* b; 574 575 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr); 576 /* hack SeqAIJ */ 577 b = (Mat_SeqAIJ*)((*B)->data); 578 b->free_a = PETSC_TRUE; 579 b->free_ij = PETSC_TRUE; 580 } else if (reuse == MAT_INPLACE_MATRIX) { 581 Mat T; 582 583 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr); 584 if (sameint) { /* ownership of CSR pointers is transferred to PETSc */ 585 hypre_CSRMatrixI(hdiag) = NULL; 586 hypre_CSRMatrixJ(hdiag) = NULL; 587 } else { /* free ij but not a */ 588 Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data); 589 590 b->free_ij = PETSC_TRUE; 591 } 592 hypre_CSRMatrixData(hdiag) = NULL; 593 ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr); 594 } 595 } 596 597 /* we have to use hypre_Tfree to free the HYPRE arrays 598 that PETSc now onws */ 599 if (reuse == MAT_INPLACE_MATRIX) { 600 PetscInt nh; 601 void *ptrs[6] = {da,oa,dii,djj,oii,ojj}; 602 const char *names[6] = {"_hypre_csr_da", 603 "_hypre_csr_oa", 604 "_hypre_csr_dii", 605 "_hypre_csr_djj", 606 "_hypre_csr_oii", 607 "_hypre_csr_ojj"}; 608 nh = sameint ? 
6 : 2; 609 for (i=0; i<nh; i++) { 610 PetscContainer c; 611 612 ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr); 613 ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr); 614 ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr); 615 ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr); 616 ierr = PetscContainerDestroy(&c);CHKERRQ(ierr); 617 } 618 } 619 PetscFunctionReturn(0); 620 } 621 622 static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 623 { 624 hypre_ParCSRMatrix *tA; 625 hypre_CSRMatrix *hdiag,*hoffd; 626 Mat_SeqAIJ *diag,*offd; 627 PetscInt *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts; 628 MPI_Comm comm = PetscObjectComm((PetscObject)A); 629 PetscBool ismpiaij,isseqaij; 630 PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); 631 PetscErrorCode ierr; 632 633 PetscFunctionBegin; 634 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 635 ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 636 if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name); 637 if (ismpiaij) { 638 Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data); 639 640 diag = (Mat_SeqAIJ*)a->A->data; 641 offd = (Mat_SeqAIJ*)a->B->data; 642 garray = a->garray; 643 noffd = a->B->cmap->N; 644 dnnz = diag->nz; 645 onnz = offd->nz; 646 } else { 647 diag = (Mat_SeqAIJ*)A->data; 648 offd = NULL; 649 garray = NULL; 650 noffd = 0; 651 dnnz = diag->nz; 652 onnz = 0; 653 } 654 655 /* create a temporary ParCSR */ 656 if (HYPRE_AssumedPartitionCheck()) { 657 PetscMPIInt myid; 658 659 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 660 row_starts = A->rmap->range + myid; 661 col_starts = A->cmap->range + myid; 662 } else { 663 row_starts = A->rmap->range; 664 col_starts = A->cmap->range; 665 } 666 tA = 
hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz); 667 hypre_ParCSRMatrixSetRowStartsOwner(tA,0); 668 hypre_ParCSRMatrixSetColStartsOwner(tA,0); 669 670 /* set diagonal part */ 671 hdiag = hypre_ParCSRMatrixDiag(tA); 672 if (sameint) { /* reuse CSR pointers */ 673 hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i; 674 hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j; 675 } else { /* malloc CSR pointers */ 676 HYPRE_Int *hi,*hj; 677 678 ierr = PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);CHKERRQ(ierr); 679 for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]); 680 for (i = 0; i < dnnz; i++) hj[i] = (HYPRE_Int)(diag->j[i]); 681 hypre_CSRMatrixI(hdiag) = hi; 682 hypre_CSRMatrixJ(hdiag) = hj; 683 } 684 hypre_CSRMatrixData(hdiag) = (HYPRE_Complex*)diag->a; 685 hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz; 686 hypre_CSRMatrixSetRownnz(hdiag); 687 hypre_CSRMatrixSetDataOwner(hdiag,0); 688 689 /* set offdiagonal part */ 690 hoffd = hypre_ParCSRMatrixOffd(tA); 691 if (offd) { 692 if (sameint) { /* reuse CSR pointers */ 693 hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i; 694 hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j; 695 } else { /* malloc CSR pointers */ 696 HYPRE_Int *hi,*hj; 697 698 ierr = PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);CHKERRQ(ierr); 699 for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]); 700 for (i = 0; i < onnz; i++) hj[i] = (HYPRE_Int)(offd->j[i]); 701 hypre_CSRMatrixI(hoffd) = hi; 702 hypre_CSRMatrixJ(hoffd) = hj; 703 } 704 hypre_CSRMatrixData(hoffd) = (HYPRE_Complex*)offd->a; 705 hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz; 706 hypre_CSRMatrixSetRownnz(hoffd); 707 hypre_CSRMatrixSetDataOwner(hoffd,0); 708 hypre_ParCSRMatrixSetNumNonzeros(tA); 709 hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray; 710 } 711 *hA = tA; 712 PetscFunctionReturn(0); 713 } 714 715 static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA) 716 { 717 
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc: when PetscInt and HYPRE_Int differ
     in size, separate HYPRE_Int index arrays were allocated for hypre */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi   = hypre_CSRMatrixI(hdiag);
    hj   = hypre_CSRMatrixJ(hdiag);
    ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    if (hoffd) {
      hi   = hypre_CSRMatrixI(hoffd);
      hj   = hypre_CSRMatrixJ(hoffd);
      ierr = PetscFree2(hi,hj);CHKERRQ(ierr);
    }
  }
  /* set pointers to NULL before destroying tA so hypre's destructor does not
     free arrays still owned by the PETSc matrix */
  hypre_CSRMatrixI(hdiag)           = NULL;
  hypre_CSRMatrixJ(hdiag)           = NULL;
  hypre_CSRMatrixData(hdiag)        = NULL;
  hypre_CSRMatrixI(hoffd)           = NULL;
  hypre_CSRMatrixJ(hoffd)           = NULL;
  hypre_CSRMatrixData(hoffd)        = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember ownership flags before hypre mutates them (see comment below) */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}

/* C = P^t * A * P with A,P AIJ; computed through hypre's RAP kernel */
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;
  Mat_Product        *product=C->product;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);

  /* MatHeaderMerge destroys C->product; restore the saved pointer afterwards */
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  C->product = product;

  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase only selects the output type and installs the numeric callbacks */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}

/* C = P^t * A * P with A AIJ and P MATHYPRE; C keeps its current type */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* C = P^t * A * P with both A and P MATHYPRE; result is MATHYPRE */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses
   hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
/* thin wrapper over hypre's parallel sparse matrix-matrix multiply */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* C = A * B with A,B AIJ; computed through hypre_ParMatmul */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;
  Mat_Product        *product=C->product;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);

  /* MatHeaderMerge destroys C->product; restore the saved pointer afterwards */
  ierr = MatHeaderMerge(C,&D);CHKERRQ(ierr);
  C->product = product;

  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase only selects the output type and installs the numeric callbacks */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* C = A * B with A,B MATHYPRE; result is MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;
  Mat_Product        *product;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product;  /* save it from MatHeaderReplace() */
  C->product = NULL;
  ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr);
  C->product              = product;
  C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric  = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* D = A^t * B * C with A,B,C AIJ; uses the same hypre RAP kernel as PtAP */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr);
  ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* symbolic phase only fixes the output type; all work happens in the numeric phase */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat D)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetType(D,MATAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------- */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(0);
}

/* install MATHYPRE callbacks for C = A*B when A is MATHYPRE */
static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)product->A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(0);
}

/* select callbacks and output type for C = P^t*A*P, honoring the runtime
   -mat[product_]ptap_hypre_outtype option when A is AIJ */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;
  PetscBool      flg;
  PetscInt       type = 0;
  const char     *outTypes[4] = {"aij","seqaij","mpiaij","hypre"};
  PetscInt       ntype = 4;
  Mat            A = product->A;
  PetscBool      Ahypre;

  PetscFunctionBegin;
  ierr = 
PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&Ahypre);CHKERRQ(ierr);
  if (Ahypre) { /* A is a Hypre matrix */
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(0);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatPtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matptap_hypre_outtype","MatPtAP outtype","MatPtAP outtype",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  } else {
    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatProduct_PtAP_HYPRE","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsEList("-matproduct_ptap_hypre_outtype","MatProduct_PtAP outtype","MatProduct_PtAP",outTypes,ntype,outTypes[type],&type,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  }

  /* types 0-2 (aij,seqaij,mpiaij) all map to MATAIJ; type 3 is hypre */
  if (type == 0 || type == 1 || type == 2) {
    ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  } else if (type == 3) {
    ierr = MatSetType(C,MATHYPRE);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(0);
}

/* dispatch MatProduct setup by product type; unsupported types are left untouched */
static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;

  PetscFunctionBegin;
  switch (product->type) {
  case MATPRODUCT_AB:
    ierr = MatProductSetFromOptions_HYPRE_AB(C);CHKERRQ(ierr);
    break;
  case MATPRODUCT_PtAP:
    ierr = MatProductSetFromOptions_HYPRE_PtAP(C);CHKERRQ(ierr);
    break;
  default:
    break;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------- */

/* y = A^t * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = A * x + y */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* z = A^t * x + y */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (y != z) {
    ierr = VecCopy(y,z);CHKERRQ(ierr);
  }
  ierr = MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans
   The PETSc vector arrays are temporarily swapped into the cached hypre
   work vectors (hA->x, hA->b) so hypre operates on PETSc memory directly. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  HYPRE_Complex      *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecGetArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  if (trans) {
    /* for the transpose, the roles of the input/output work vectors swap */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,(PetscScalar**)&ay);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* release all hypre objects and composed methods owned by the MATHYPRE wrapper */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* if PETSc does not own the inner ParCSR, detach it so hypre does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) {ierr = MPI_Comm_free(&hA->comm);CHKERRMPI(ierr);}

  ierr = MatStashDestroy_Private(&A->stash);CHKERRQ(ierr);

  ierr = PetscFree(hA->array);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_seqaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_mpiaij_hypre_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* default setup: preallocate with default row sizes */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* flush stashed off-process entries, assemble the hypre IJ matrix, and
   lazily create the work vectors used by the mult kernels */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  Vec            x,b;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscScalar    *val;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&A->stash);CHKERRQ(ierr);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
hypre_AuxParCSRMatrix *aux_matrix; 1217 1218 /* call destroy just to make sure we do not leak anything */ 1219 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1220 PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix)); 1221 hypre_IJMatrixTranslator(hA->ij) = NULL; 1222 1223 /* Initialize with assembled flag -> it only recreates the aux_par_matrix */ 1224 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1225 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1226 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */ 1227 #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0) 1228 PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix)); 1229 #else 1230 PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize_v2,(aux_matrix,HYPRE_MEMORY_HOST)); 1231 #endif 1232 } 1233 if (hA->x) PetscFunctionReturn(0); 1234 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1235 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1236 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr); 1237 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr); 1238 ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr); 1239 ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr); 1240 ierr = VecDestroy(&x);CHKERRQ(ierr); 1241 ierr = VecDestroy(&b);CHKERRQ(ierr); 1242 PetscFunctionReturn(0); 1243 } 1244 1245 static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array) 1246 { 1247 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1248 PetscErrorCode ierr; 1249 1250 PetscFunctionBegin; 1251 if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use"); 1252 1253 if (hA->size >= size) { 1254 *array = hA->array; 1255 } else { 1256 ierr = PetscFree(hA->array);CHKERRQ(ierr); 1257 hA->size = size; 1258 ierr = PetscMalloc(hA->size,&hA->array);CHKERRQ(ierr); 1259 *array = 
hA->array; 1260 } 1261 1262 hA->available = PETSC_FALSE; 1263 PetscFunctionReturn(0); 1264 } 1265 1266 static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array) 1267 { 1268 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1269 1270 PetscFunctionBegin; 1271 *array = NULL; 1272 hA->available = PETSC_TRUE; 1273 PetscFunctionReturn(0); 1274 } 1275 1276 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins) 1277 { 1278 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1279 PetscScalar *vals = (PetscScalar *)v; 1280 HYPRE_Complex *sscr; 1281 PetscInt *cscr[2]; 1282 PetscInt i,nzc; 1283 void *array = NULL; 1284 PetscErrorCode ierr; 1285 1286 PetscFunctionBegin; 1287 ierr = MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);CHKERRQ(ierr); 1288 cscr[0] = (PetscInt*)array; 1289 cscr[1] = ((PetscInt*)array)+nc; 1290 sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2); 1291 for (i=0,nzc=0;i<nc;i++) { 1292 if (cols[i] >= 0) { 1293 cscr[0][nzc ] = cols[i]; 1294 cscr[1][nzc++] = i; 1295 } 1296 } 1297 if (!nzc) { 1298 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1299 PetscFunctionReturn(0); 1300 } 1301 1302 if (ins == ADD_VALUES) { 1303 for (i=0;i<nr;i++) { 1304 if (rows[i] >= 0 && nzc) { 1305 PetscInt j; 1306 HYPRE_Int hnc = (HYPRE_Int)nzc; 1307 1308 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! 
number of columns %D for row %D",nzc,rows[i]); 1309 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1310 PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1311 } 1312 vals += nc; 1313 } 1314 } else { /* INSERT_VALUES */ 1315 PetscInt rst,ren; 1316 1317 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1318 for (i=0;i<nr;i++) { 1319 if (rows[i] >= 0 && nzc) { 1320 PetscInt j; 1321 HYPRE_Int hnc = (HYPRE_Int)nzc; 1322 1323 if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]); 1324 for (j=0;j<nzc;j++) { ierr = PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); } 1325 /* nonlocal values */ 1326 if (rows[i] < rst || rows[i] >= ren) { ierr = MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE);CHKERRQ(ierr); } 1327 /* local values */ 1328 else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr)); 1329 } 1330 vals += nc; 1331 } 1332 } 1333 1334 ierr = MatRestoreArray_HYPRE(A,&array);CHKERRQ(ierr); 1335 PetscFunctionReturn(0); 1336 } 1337 1338 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1339 { 1340 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1341 HYPRE_Int *hdnnz,*honnz; 1342 PetscInt i,rs,re,cs,ce,bs; 1343 PetscMPIInt size; 1344 PetscErrorCode ierr; 1345 1346 PetscFunctionBegin; 1347 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1348 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1349 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1350 rs = A->rmap->rstart; 1351 re = A->rmap->rend; 1352 cs = A->cmap->rstart; 1353 ce = A->cmap->rend; 1354 if (!hA->ij) { 1355 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij)); 1356 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 
  } else {
    /* the IJ matrix already exists: verify its local range matches the PETSc layout */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  /* default preallocation: 10 nonzeros per block row, like the AIJ types */
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int*)dnnz;
  }
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRMPI(ierr);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else honnz = (HYPRE_Int*)onnz;
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    /* sequential case: only the diagonal block exists */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (!dnnz) {
    ierr = PetscFree(hdnnz);CHKERRQ(ierr);
  }
  if (!onnz && honnz) {
    ierr = PetscFree(honnz);CHKERRQ(ierr);
  }

  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@C
   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

   Collective on Mat

   Input Parameters:
+  A    - the matrix
.  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   Notes:
    If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

   Level: intermediate

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

   Collective

   Input Parameters:
+  parcsr   - the pointer to the hypre_ParCSRMatrix
.  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
-  copymode - PETSc copying options

   Output Parameter:
.  
A - the matrix

   Level: intermediate

.seealso: MatHYPRE, PetscCopyMode
*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr);
  ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr);
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: ranges are half-open [rstart,rend) */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  ierr = MatCreate(comm,&T);CHKERRQ(ierr);
  ierr = MatSetSizes(T,rend-rstart,cend-cstart,M,N);CHKERRQ(ierr);
  ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

  /* create new ParCSR object if needed: PETSC_COPY_VALUES for MATHYPRE output
     clones the structure and copies the data, then the clone is owned */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));CHKERRQ(ierr);
    ierr       = PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));CHKERRQ(ierr);
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      ierr = MPI_Comm_rank(comm,&myid);CHKERRMPI(ierr);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      ierr = MatGetLayouts(T,&map,NULL);CHKERRQ(ierr);
      ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
      ierr = MatDestroy(&T);CHKERRQ(ierr);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr);
      *A   = T;
    }
  } else if (isis) {
    ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    ierr = MatDestroy(&T);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* implementation behind MatHYPREGetParCSR(): validates the IJ matrix and
   hands out the inner ParCSR pointer */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1588 HYPRE_Int type; 1589 1590 PetscFunctionBegin; 1591 if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present"); 1592 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 1593 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR"); 1594 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr)); 1595 PetscFunctionReturn(0); 1596 } 1597 1598 /* 1599 MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix 1600 1601 Not collective 1602 1603 Input Parameters: 1604 + A - the MATHYPRE object 1605 1606 Output Parameter: 1607 . parcsr - the pointer to the hypre_ParCSRMatrix 1608 1609 Level: intermediate 1610 1611 .seealso: MatHYPRE, PetscCopyMode 1612 */ 1613 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr) 1614 { 1615 PetscErrorCode ierr; 1616 1617 PetscFunctionBegin; 1618 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1619 PetscValidType(A,1); 1620 ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr); 1621 PetscFunctionReturn(0); 1622 } 1623 1624 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd) 1625 { 1626 hypre_ParCSRMatrix *parcsr; 1627 hypre_CSRMatrix *ha; 1628 PetscInt rst; 1629 PetscErrorCode ierr; 1630 1631 PetscFunctionBegin; 1632 if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks"); 1633 ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr); 1634 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1635 if (missing) *missing = PETSC_FALSE; 1636 if (dd) *dd = -1; 1637 ha = hypre_ParCSRMatrixDiag(parcsr); 1638 if (ha) { 1639 PetscInt size,i; 1640 HYPRE_Int *ii,*jj; 1641 1642 size = hypre_CSRMatrixNumRows(ha); 1643 ii = hypre_CSRMatrixI(ha); 1644 jj = hypre_CSRMatrixJ(ha); 1645 for (i = 0; i < size; i++) { 1646 PetscInt 
j; 1647 PetscBool found = PETSC_FALSE; 1648 1649 for (j = ii[i]; j < ii[i+1] && !found; j++) 1650 found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE; 1651 1652 if (!found) { 1653 PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i); 1654 if (missing) *missing = PETSC_TRUE; 1655 if (dd) *dd = i+rst; 1656 PetscFunctionReturn(0); 1657 } 1658 } 1659 if (!size) { 1660 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1661 if (missing) *missing = PETSC_TRUE; 1662 if (dd) *dd = rst; 1663 } 1664 } else { 1665 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1666 if (missing) *missing = PETSC_TRUE; 1667 if (dd) *dd = rst; 1668 } 1669 PetscFunctionReturn(0); 1670 } 1671 1672 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s) 1673 { 1674 hypre_ParCSRMatrix *parcsr; 1675 hypre_CSRMatrix *ha; 1676 PetscErrorCode ierr; 1677 HYPRE_Complex hs; 1678 1679 PetscFunctionBegin; 1680 ierr = PetscHYPREScalarCast(s,&hs);CHKERRQ(ierr); 1681 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1682 /* diagonal part */ 1683 ha = hypre_ParCSRMatrixDiag(parcsr); 1684 if (ha) { 1685 PetscInt size,i; 1686 HYPRE_Int *ii; 1687 HYPRE_Complex *a; 1688 1689 size = hypre_CSRMatrixNumRows(ha); 1690 a = hypre_CSRMatrixData(ha); 1691 ii = hypre_CSRMatrixI(ha); 1692 for (i = 0; i < ii[size]; i++) a[i] *= hs; 1693 } 1694 /* offdiagonal part */ 1695 ha = hypre_ParCSRMatrixOffd(parcsr); 1696 if (ha) { 1697 PetscInt size,i; 1698 HYPRE_Int *ii; 1699 HYPRE_Complex *a; 1700 1701 size = hypre_CSRMatrixNumRows(ha); 1702 a = hypre_CSRMatrixData(ha); 1703 ii = hypre_CSRMatrixI(ha); 1704 for (i = 0; i < ii[size]; i++) a[i] *= hs; 1705 } 1706 PetscFunctionReturn(0); 1707 } 1708 1709 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 1710 { 1711 hypre_ParCSRMatrix *parcsr; 1712 HYPRE_Int *lrows; 1713 PetscInt rst,ren,i; 1714 PetscErrorCode ierr; 1715 1716 
PetscFunctionBegin; 1717 if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented"); 1718 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1719 ierr = PetscMalloc1(numRows,&lrows);CHKERRQ(ierr); 1720 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1721 for (i=0;i<numRows;i++) { 1722 if (rows[i] < rst || rows[i] >= ren) 1723 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported"); 1724 lrows[i] = rows[i] - rst; 1725 } 1726 PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows)); 1727 ierr = PetscFree(lrows);CHKERRQ(ierr); 1728 PetscFunctionReturn(0); 1729 } 1730 1731 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha) 1732 { 1733 PetscErrorCode ierr; 1734 1735 PetscFunctionBegin; 1736 if (ha) { 1737 HYPRE_Int *ii, size; 1738 HYPRE_Complex *a; 1739 1740 size = hypre_CSRMatrixNumRows(ha); 1741 a = hypre_CSRMatrixData(ha); 1742 ii = hypre_CSRMatrixI(ha); 1743 1744 if (a) {ierr = PetscArrayzero(a,ii[size]);CHKERRQ(ierr);} 1745 } 1746 PetscFunctionReturn(0); 1747 } 1748 1749 PetscErrorCode MatZeroEntries_HYPRE(Mat A) 1750 { 1751 hypre_ParCSRMatrix *parcsr; 1752 PetscErrorCode ierr; 1753 1754 PetscFunctionBegin; 1755 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1756 /* diagonal part */ 1757 ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));CHKERRQ(ierr); 1758 /* off-diagonal part */ 1759 ierr = MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));CHKERRQ(ierr); 1760 PetscFunctionReturn(0); 1761 } 1762 1763 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag) 1764 { 1765 PetscInt ii; 1766 HYPRE_Int *i, *j; 1767 HYPRE_Complex *a; 1768 1769 PetscFunctionBegin; 1770 if (!hA) PetscFunctionReturn(0); 1771 1772 i = hypre_CSRMatrixI(hA); 1773 j = hypre_CSRMatrixJ(hA); 1774 a = hypre_CSRMatrixData(hA); 1775 1776 for (ii = 0; ii < N; ii++) { 1777 HYPRE_Int jj, 
ibeg, iend, irow; 1778 1779 irow = rows[ii]; 1780 ibeg = i[irow]; 1781 iend = i[irow+1]; 1782 for (jj = ibeg; jj < iend; jj++) 1783 if (j[jj] == irow) a[jj] = diag; 1784 else a[jj] = 0.0; 1785 } 1786 PetscFunctionReturn(0); 1787 } 1788 1789 static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 1790 { 1791 hypre_ParCSRMatrix *parcsr; 1792 PetscInt *lrows,len; 1793 HYPRE_Complex hdiag; 1794 PetscErrorCode ierr; 1795 1796 PetscFunctionBegin; 1797 if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size"); 1798 ierr = PetscHYPREScalarCast(diag,&hdiag);CHKERRQ(ierr); 1799 /* retrieve the internal matrix */ 1800 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1801 /* get locally owned rows */ 1802 ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr); 1803 /* zero diagonal part */ 1804 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);CHKERRQ(ierr); 1805 /* zero off-diagonal part */ 1806 ierr = MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);CHKERRQ(ierr); 1807 1808 ierr = PetscFree(lrows);CHKERRQ(ierr); 1809 PetscFunctionReturn(0); 1810 } 1811 1812 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode) 1813 { 1814 PetscErrorCode ierr; 1815 1816 PetscFunctionBegin; 1817 if (mat->nooffprocentries) PetscFunctionReturn(0); 1818 1819 ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 1820 PetscFunctionReturn(0); 1821 } 1822 1823 static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1824 { 1825 hypre_ParCSRMatrix *parcsr; 1826 HYPRE_Int hnz; 1827 PetscErrorCode ierr; 1828 1829 PetscFunctionBegin; 1830 /* retrieve the internal matrix */ 1831 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1832 /* call HYPRE API */ 1833 
PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1834 if (nz) *nz = (PetscInt)hnz; 1835 PetscFunctionReturn(0); 1836 } 1837 1838 static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1839 { 1840 hypre_ParCSRMatrix *parcsr; 1841 HYPRE_Int hnz; 1842 PetscErrorCode ierr; 1843 1844 PetscFunctionBegin; 1845 /* retrieve the internal matrix */ 1846 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1847 /* call HYPRE API */ 1848 hnz = nz ? (HYPRE_Int)(*nz) : 0; 1849 PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v)); 1850 PetscFunctionReturn(0); 1851 } 1852 1853 static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 1854 { 1855 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1856 PetscInt i; 1857 1858 PetscFunctionBegin; 1859 if (!m || !n) PetscFunctionReturn(0); 1860 /* Ignore negative row indices 1861 * And negative column indices should be automatically ignored in hypre 1862 * */ 1863 for (i=0; i<m; i++) { 1864 if (idxm[i] >= 0) { 1865 HYPRE_Int hn = (HYPRE_Int)n; 1866 PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n))); 1867 } 1868 } 1869 PetscFunctionReturn(0); 1870 } 1871 1872 static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg) 1873 { 1874 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1875 1876 PetscFunctionBegin; 1877 switch (op) { 1878 case MAT_NO_OFF_PROC_ENTRIES: 1879 if (flg) { 1880 PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0)); 1881 } 1882 break; 1883 case MAT_SORTED_FULL: 1884 hA->sorted_full = flg; 1885 break; 1886 default: 1887 break; 1888 } 1889 PetscFunctionReturn(0); 1890 } 1891 1892 static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view) 1893 { 1894 hypre_ParCSRMatrix *parcsr; 1895 PetscErrorCode 
ierr; 1896 Mat B; 1897 PetscViewerFormat format; 1898 PetscErrorCode (*mview)(Mat,PetscViewer) = NULL; 1899 1900 PetscFunctionBegin; 1901 ierr = PetscViewerGetFormat(view,&format);CHKERRQ(ierr); 1902 if (format != PETSC_VIEWER_NATIVE) { 1903 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1904 ierr = MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);CHKERRQ(ierr); 1905 ierr = MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);CHKERRQ(ierr); 1906 if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");CHKERRQ(ierr); 1907 ierr = (*mview)(B,view);CHKERRQ(ierr); 1908 ierr = MatDestroy(&B);CHKERRQ(ierr); 1909 } else { 1910 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1911 PetscMPIInt size; 1912 PetscBool isascii; 1913 const char *filename; 1914 1915 /* HYPRE uses only text files */ 1916 ierr = PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1917 if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name); 1918 ierr = PetscViewerFileGetName(view,&filename);CHKERRQ(ierr); 1919 PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename)); 1920 ierr = MPI_Comm_size(hA->comm,&size);CHKERRMPI(ierr); 1921 if (size > 1) { 1922 ierr = PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... 
%s.%05d\n",filename,0,filename,size-1);CHKERRQ(ierr); 1923 } else { 1924 ierr = PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);CHKERRQ(ierr); 1925 } 1926 } 1927 PetscFunctionReturn(0); 1928 } 1929 1930 static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B) 1931 { 1932 hypre_ParCSRMatrix *parcsr; 1933 PetscErrorCode ierr; 1934 PetscCopyMode cpmode; 1935 1936 PetscFunctionBegin; 1937 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1938 if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) { 1939 parcsr = hypre_ParCSRMatrixClone(parcsr,0); 1940 cpmode = PETSC_OWN_POINTER; 1941 } else { 1942 cpmode = PETSC_COPY_VALUES; 1943 } 1944 ierr = MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);CHKERRQ(ierr); 1945 PetscFunctionReturn(0); 1946 } 1947 1948 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str) 1949 { 1950 hypre_ParCSRMatrix *acsr,*bcsr; 1951 PetscErrorCode ierr; 1952 1953 PetscFunctionBegin; 1954 if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) { 1955 ierr = MatHYPREGetParCSR_HYPRE(A,&acsr);CHKERRQ(ierr); 1956 ierr = MatHYPREGetParCSR_HYPRE(B,&bcsr);CHKERRQ(ierr); 1957 PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1)); 1958 ierr = MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 1959 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1960 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1961 } else { 1962 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1963 } 1964 PetscFunctionReturn(0); 1965 } 1966 1967 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d) 1968 { 1969 hypre_ParCSRMatrix *parcsr; 1970 hypre_CSRMatrix *dmat; 1971 HYPRE_Complex *a; 1972 HYPRE_Complex *data = NULL; 1973 HYPRE_Int *diag = NULL; 1974 PetscInt i; 1975 PetscBool cong; 1976 PetscErrorCode ierr; 1977 1978 PetscFunctionBegin; 1979 ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr); 1980 if (!cong) 
SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns"); 1981 if (PetscDefined(USE_DEBUG)) { 1982 PetscBool miss; 1983 ierr = MatMissingDiagonal(A,&miss,NULL);CHKERRQ(ierr); 1984 if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing"); 1985 } 1986 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1987 dmat = hypre_ParCSRMatrixDiag(parcsr); 1988 if (dmat) { 1989 /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 1990 ierr = VecGetArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 1991 diag = hypre_CSRMatrixI(dmat); 1992 data = hypre_CSRMatrixData(dmat); 1993 for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]]; 1994 ierr = VecRestoreArray(d,(PetscScalar**)&a);CHKERRQ(ierr); 1995 } 1996 PetscFunctionReturn(0); 1997 } 1998 1999 #include <petscblaslapack.h> 2000 2001 static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str) 2002 { 2003 PetscErrorCode ierr; 2004 2005 PetscFunctionBegin; 2006 if (str == SAME_NONZERO_PATTERN) { 2007 hypre_ParCSRMatrix *x,*y; 2008 hypre_CSRMatrix *xloc,*yloc; 2009 PetscInt xnnz,ynnz; 2010 HYPRE_Complex *xarr,*yarr; 2011 PetscBLASInt one=1,bnz; 2012 2013 ierr = MatHYPREGetParCSR(Y,&y);CHKERRQ(ierr); 2014 ierr = MatHYPREGetParCSR(X,&x);CHKERRQ(ierr); 2015 2016 /* diagonal block */ 2017 xloc = hypre_ParCSRMatrixDiag(x); 2018 yloc = hypre_ParCSRMatrixDiag(y); 2019 xnnz = 0; 2020 ynnz = 0; 2021 xarr = NULL; 2022 yarr = NULL; 2023 if (xloc) { 2024 xarr = hypre_CSRMatrixData(xloc); 2025 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 2026 } 2027 if (yloc) { 2028 yarr = hypre_CSRMatrixData(yloc); 2029 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 2030 } 2031 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz); 2032 ierr = 
PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 2033 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 2034 2035 /* off-diagonal block */ 2036 xloc = hypre_ParCSRMatrixOffd(x); 2037 yloc = hypre_ParCSRMatrixOffd(y); 2038 xnnz = 0; 2039 ynnz = 0; 2040 xarr = NULL; 2041 yarr = NULL; 2042 if (xloc) { 2043 xarr = hypre_CSRMatrixData(xloc); 2044 xnnz = hypre_CSRMatrixNumNonzeros(xloc); 2045 } 2046 if (yloc) { 2047 yarr = hypre_CSRMatrixData(yloc); 2048 ynnz = hypre_CSRMatrixNumNonzeros(yloc); 2049 } 2050 if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz); 2051 ierr = PetscBLASIntCast(xnnz,&bnz);CHKERRQ(ierr); 2052 PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one)); 2053 } else if (str == SUBSET_NONZERO_PATTERN) { 2054 ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr); 2055 } else { 2056 Mat B; 2057 2058 ierr = MatAXPY_Basic_Preallocate(Y,X,&B);CHKERRQ(ierr); 2059 ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr); 2060 ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr); 2061 } 2062 PetscFunctionReturn(0); 2063 } 2064 2065 /*MC 2066 MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices 2067 based on the hypre IJ interface. 
2068 2069 Level: intermediate 2070 2071 .seealso: MatCreate() 2072 M*/ 2073 2074 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B) 2075 { 2076 Mat_HYPRE *hB; 2077 PetscErrorCode ierr; 2078 2079 PetscFunctionBegin; 2080 ierr = PetscNewLog(B,&hB);CHKERRQ(ierr); 2081 hB->inner_free = PETSC_TRUE; 2082 hB->available = PETSC_TRUE; 2083 hB->sorted_full= PETSC_FALSE; /* no assumption whether column indices are sorted or not */ 2084 hB->size = 0; 2085 hB->array = NULL; 2086 2087 B->data = (void*)hB; 2088 B->rmap->bs = 1; 2089 B->assembled = PETSC_FALSE; 2090 2091 ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 2092 B->ops->mult = MatMult_HYPRE; 2093 B->ops->multtranspose = MatMultTranspose_HYPRE; 2094 B->ops->multadd = MatMultAdd_HYPRE; 2095 B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE; 2096 B->ops->setup = MatSetUp_HYPRE; 2097 B->ops->destroy = MatDestroy_HYPRE; 2098 B->ops->assemblyend = MatAssemblyEnd_HYPRE; 2099 B->ops->assemblybegin = MatAssemblyBegin_HYPRE; 2100 B->ops->setvalues = MatSetValues_HYPRE; 2101 B->ops->missingdiagonal = MatMissingDiagonal_HYPRE; 2102 B->ops->scale = MatScale_HYPRE; 2103 B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE; 2104 B->ops->zeroentries = MatZeroEntries_HYPRE; 2105 B->ops->zerorows = MatZeroRows_HYPRE; 2106 B->ops->getrow = MatGetRow_HYPRE; 2107 B->ops->restorerow = MatRestoreRow_HYPRE; 2108 B->ops->getvalues = MatGetValues_HYPRE; 2109 B->ops->setoption = MatSetOption_HYPRE; 2110 B->ops->duplicate = MatDuplicate_HYPRE; 2111 B->ops->copy = MatCopy_HYPRE; 2112 B->ops->view = MatView_HYPRE; 2113 B->ops->getdiagonal = MatGetDiagonal_HYPRE; 2114 B->ops->axpy = MatAXPY_HYPRE; 2115 B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE; 2116 2117 /* build cache for off array entries formed */ 2118 ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr); 2119 2120 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRMPI(ierr); 2121 ierr = 
PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr); 2122 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr); 2123 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr); 2124 ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_seqaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr); 2125 ierr = PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_mpiaij_hypre_C",MatProductSetFromOptions_HYPRE);CHKERRQ(ierr); 2126 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr); 2127 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr); 2128 PetscFunctionReturn(0); 2129 } 2130 2131 static PetscErrorCode hypre_array_destroy(void *ptr) 2132 { 2133 PetscFunctionBegin; 2134 hypre_TFree(ptr,HYPRE_MEMORY_HOST); 2135 PetscFunctionReturn(0); 2136 } 2137