1 2 /* 3 Creates hypre ijmatrix from PETSc matrix 4 */ 5 6 #include <petscmathypre.h> 7 #include <petsc/private/matimpl.h> 8 #include <../src/mat/impls/hypre/mhypre.h> 9 #include <../src/mat/impls/aij/mpi/mpiaij.h> 10 #include <../src/vec/vec/impls/hypre/vhyp.h> 11 #include <HYPRE.h> 12 #include <HYPRE_utilities.h> 13 #include <_hypre_parcsr_ls.h> 14 #include <_hypre_sstruct_ls.h> 15 16 static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*); 17 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix); 18 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix); 19 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix); 20 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool); 21 static PetscErrorCode hypre_array_destroy(void*); 22 23 static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij) 24 { 25 PetscErrorCode ierr; 26 PetscInt i,n_d,n_o; 27 const PetscInt *ia_d,*ia_o; 28 PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE; 29 PetscInt *nnz_d=NULL,*nnz_o=NULL; 30 31 PetscFunctionBegin; 32 if (A_d) { /* determine number of nonzero entries in local diagonal part */ 33 ierr = MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);CHKERRQ(ierr); 34 if (done_d) { 35 ierr = PetscMalloc1(n_d,&nnz_d);CHKERRQ(ierr); 36 for (i=0; i<n_d; i++) { 37 nnz_d[i] = ia_d[i+1] - ia_d[i]; 38 } 39 } 40 ierr = MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);CHKERRQ(ierr); 41 } 42 if (A_o) { /* determine number of nonzero entries in local off-diagonal part */ 43 ierr = MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 44 if (done_o) { 45 ierr = PetscMalloc1(n_o,&nnz_o);CHKERRQ(ierr); 46 for (i=0; i<n_o; i++) { 47 nnz_o[i] = ia_o[i+1] - ia_o[i]; 48 } 49 } 50 ierr = MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);CHKERRQ(ierr); 51 } 52 if (done_d) { /* set number of nonzeros in HYPRE IJ matrix 
*/ 53 if (!done_o) { /* only diagonal part */ 54 ierr = PetscMalloc1(n_d,&nnz_o);CHKERRQ(ierr); 55 for (i=0; i<n_d; i++) { 56 nnz_o[i] = 0; 57 } 58 } 59 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o)); 60 ierr = PetscFree(nnz_d);CHKERRQ(ierr); 61 ierr = PetscFree(nnz_o);CHKERRQ(ierr); 62 } 63 PetscFunctionReturn(0); 64 } 65 66 static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA) 67 { 68 PetscErrorCode ierr; 69 PetscInt rstart,rend,cstart,cend; 70 71 PetscFunctionBegin; 72 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 73 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 74 rstart = A->rmap->rstart; 75 rend = A->rmap->rend; 76 cstart = A->cmap->rstart; 77 cend = A->cmap->rend; 78 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 79 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 80 { 81 PetscBool same; 82 Mat A_d,A_o; 83 const PetscInt *colmap; 84 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);CHKERRQ(ierr); 85 if (same) { 86 ierr = MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 87 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 88 PetscFunctionReturn(0); 89 } 90 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);CHKERRQ(ierr); 91 if (same) { 92 ierr = MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);CHKERRQ(ierr); 93 ierr = MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);CHKERRQ(ierr); 94 PetscFunctionReturn(0); 95 } 96 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);CHKERRQ(ierr); 97 if (same) { 98 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 99 PetscFunctionReturn(0); 100 } 101 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);CHKERRQ(ierr); 102 if (same) { 103 ierr = MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);CHKERRQ(ierr); 104 PetscFunctionReturn(0); 105 } 106 } 107 PetscFunctionReturn(0); 108 } 109 110 static PetscErrorCode 
MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij) 111 { 112 PetscErrorCode ierr; 113 PetscInt i,rstart,rend,ncols,nr,nc; 114 const PetscScalar *values; 115 const PetscInt *cols; 116 PetscBool flg; 117 118 PetscFunctionBegin; 119 ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr); 120 ierr = MatGetSize(A,&nr,&nc);CHKERRQ(ierr); 121 if (flg && nr == nc) { 122 ierr = MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr); 126 if (flg) { 127 ierr = MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);CHKERRQ(ierr); 128 PetscFunctionReturn(0); 129 } 130 131 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 132 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 133 for (i=rstart; i<rend; i++) { 134 ierr = MatGetRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 135 if (ncols) { 136 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values)); 137 } 138 ierr = MatRestoreRow(A,i,&ncols,&cols,&values);CHKERRQ(ierr); 139 } 140 PetscFunctionReturn(0); 141 } 142 143 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij) 144 { 145 PetscErrorCode ierr; 146 Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data; 147 HYPRE_Int type; 148 hypre_ParCSRMatrix *par_matrix; 149 hypre_AuxParCSRMatrix *aux_matrix; 150 hypre_CSRMatrix *hdiag; 151 152 PetscFunctionBegin; 153 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 154 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 155 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 156 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 157 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 158 /* 159 this is the Hack part where we monkey directly with the hypre datastructures 160 */ 161 ierr = PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 
1)*sizeof(PetscInt)); 162 ierr = PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt)); 163 ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar)); 164 165 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 166 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 167 PetscFunctionReturn(0); 168 } 169 170 static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij) 171 { 172 PetscErrorCode ierr; 173 Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data; 174 Mat_SeqAIJ *pdiag,*poffd; 175 PetscInt i,*garray = pA->garray,*jj,cstart,*pjj; 176 HYPRE_Int type; 177 hypre_ParCSRMatrix *par_matrix; 178 hypre_AuxParCSRMatrix *aux_matrix; 179 hypre_CSRMatrix *hdiag,*hoffd; 180 181 PetscFunctionBegin; 182 pdiag = (Mat_SeqAIJ*) pA->A->data; 183 poffd = (Mat_SeqAIJ*) pA->B->data; 184 /* cstart is only valid for square MPIAIJ layed out in the usual way */ 185 ierr = MatGetOwnershipRange(A,&cstart,NULL);CHKERRQ(ierr); 186 187 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij)); 188 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type)); 189 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 190 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix)); 191 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 192 hoffd = hypre_ParCSRMatrixOffd(par_matrix); 193 194 /* 195 this is the Hack part where we monkey directly with the hypre datastructures 196 */ 197 ierr = PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 198 /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */ 199 jj = (PetscInt*)hdiag->j; 200 pjj = (PetscInt*)pdiag->j; 201 for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i]; 202 ierr = PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar)); 203 ierr = PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt)); 204 /* need to move the offd column indices (hoffd->j) 
back to global numbering since hypre is expecting this 205 If we hacked a hypre a bit more we might be able to avoid this step */ 206 jj = (PetscInt*) hoffd->j; 207 pjj = (PetscInt*) poffd->j; 208 for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]]; 209 ierr = PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar)); 210 211 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij); 212 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0; 213 PetscFunctionReturn(0); 214 } 215 216 static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B) 217 { 218 Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data); 219 Mat lA; 220 ISLocalToGlobalMapping rl2g,cl2g; 221 IS is; 222 hypre_ParCSRMatrix *hA; 223 hypre_CSRMatrix *hdiag,*hoffd; 224 MPI_Comm comm; 225 PetscScalar *hdd,*hod,*aa,*data; 226 HYPRE_Int *col_map_offd,*hdi,*hdj,*hoi,*hoj; 227 PetscInt *ii,*jj,*iptr,*jptr; 228 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N; 229 HYPRE_Int type; 230 PetscErrorCode ierr; 231 232 PetscFunctionBegin; 233 comm = PetscObjectComm((PetscObject)A); 234 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type)); 235 if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 236 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA)); 237 M = hypre_ParCSRMatrixGlobalNumRows(hA); 238 N = hypre_ParCSRMatrixGlobalNumCols(hA); 239 str = hypre_ParCSRMatrixFirstRowIndex(hA); 240 stc = hypre_ParCSRMatrixFirstColDiag(hA); 241 hdiag = hypre_ParCSRMatrixDiag(hA); 242 hoffd = hypre_ParCSRMatrixOffd(hA); 243 dr = hypre_CSRMatrixNumRows(hdiag); 244 dc = hypre_CSRMatrixNumCols(hdiag); 245 nnz = hypre_CSRMatrixNumNonzeros(hdiag); 246 hdi = hypre_CSRMatrixI(hdiag); 247 hdj = hypre_CSRMatrixJ(hdiag); 248 hdd = hypre_CSRMatrixData(hdiag); 249 oc = hypre_CSRMatrixNumCols(hoffd); 250 nnz += hypre_CSRMatrixNumNonzeros(hoffd); 251 hoi = hypre_CSRMatrixI(hoffd); 252 hoj = hypre_CSRMatrixJ(hoffd); 253 hod = hypre_CSRMatrixData(hoffd); 254 if 
 (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    /* local column numbering: diag columns first, then offd columns */
    ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* create MATIS object */
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,dr,dc,M,N);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATIS);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(*B,rl2g,cl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);

    /* allocate CSR for local matrix */
    ierr = PetscMalloc1(dr+1,&iptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&jptr);CHKERRQ(ierr);
    ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr);
  } else {
    /* reuse path: borrow the CSR arrays of the existing local matrix */
    PetscInt  nr;
    PetscBool done;
    ierr = MatISGetLocalMat(*B,&lA);CHKERRQ(ierr);
    ierr = MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);CHKERRQ(ierr);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    ierr = MatSeqAIJGetArray(lA,&data);CHKERRQ(ierr);
  }
  /* merge local matrices: interleave diag and offd rows, offd columns are
     shifted by dc into the local numbering, then each row is sorted */
  ii  = iptr;
  jj  = jptr;
  aa  = data;
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);CHKERRQ(ierr);
  }
  /* pad trailing empty rows of the row pointer array */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);CHKERRQ(ierr);
    ierr = MatISSetLocalMat(*B,lA);CHKERRQ(ierr);
    /* hack SeqAIJ: transfer ownership of the CSR arrays to the local matrix */
    a           = (Mat_SeqAIJ*)(lA->data);
    a->free_a   = PETSC_TRUE;
    a->free_ij  = PETSC_TRUE;
    ierr = MatDestroy(&lA);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Converts an AIJ matrix to MATHYPRE. MAT_INPLACE_MATRIX is not supported;
   on reuse the old IJ matrix is destroyed and rebuilt from scratch. */
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    ierr = MatCreate(comm,B);CHKERRQ(ierr);
    ierr = MatSetType(*B,MATHYPRE);CHKERRQ(ierr);
    ierr = MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
    hB   = (Mat_HYPRE*)((*B)->data);
  }
  ierr = MatHYPRE_CreateFromMat(A,hB);CHKERRQ(ierr);
  ierr = MatHYPRE_IJMatrixCopy(A,hB->ij);CHKERRQ(ierr);
  (*B)->preallocated = PETSC_TRUE;
  ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Converts a MATHYPRE matrix to (Seq|MPI)AIJ, supporting all MatReuse modes;
   in the INPLACE case the hypre CSR arrays are stolen rather than copied. */
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    ierr = PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  =
 hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain the diag CSR arrays: fresh allocation, borrowed from *B, or
     stolen from hypre depending on the reuse mode */
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = PetscMalloc1(m+1,&dii);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&djj);CHKERRQ(ierr);
    ierr = PetscMalloc1(dnnz,&da);CHKERRQ(ierr);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      ierr = MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(b->A,&da);CHKERRQ(ierr);
    } else {
      ierr = MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);CHKERRQ(ierr);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      ierr = MatSeqAIJGetArray(*B,&da);CHKERRQ(ierr);
    }
  } else { /* MAT_INPLACE_MATRIX */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  /* in the INPLACE case these copies are self-copies of the stolen arrays */
  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));CHKERRQ(ierr);
  /* sort column indices within each row (AIJ expects sorted rows) */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = PetscMalloc1(m+1,&oii);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&ojj);CHKERRQ(ierr);
      ierr = PetscMalloc1(onnz,&oa);CHKERRQ(ierr);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      ierr = MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);CHKERRQ(ierr);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      ierr = MatSeqAIJGetArray(b->B,&oa);CHKERRQ(ierr);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
    /* translate compressed offd column indices to global column numbers */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));CHKERRQ(ierr);
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
      PetscInt nc = oii[i+1]-oii[i];
      ierr = PetscSortIntWithScalarArray(nc,iptr,aptr);CHKERRQ(ierr);
      iptr += nc;
      aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);CHKERRQ(ierr);
      /* hack MPIAIJ: transfer ownership of the CSR arrays to *B */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);CHKERRQ(ierr);
      /* detach the stolen arrays from hypre before replacing A's header */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  } else {
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);CHKERRQ(ierr);
      /* hack SeqAIJ: transfer ownership of the CSR arrays to *B */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);CHKERRQ(ierr);
      /* detach the stolen arrays from hypre before replacing A's header */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      ierr = MatHeaderReplace(A,&T);CHKERRQ(ierr);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void       *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    /* attach hypre-allocated arrays to *B so they are freed with hypre_Tfree
       (via hypre_array_destroy) when *B is destroyed */
    for (i=0; i<6; i++) {
      PetscContainer c;

      ierr = PetscContainerCreate(comm,&c);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr);
      ierr = PetscContainerSetUserDestroy(c,hypre_array_destroy);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&c);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* Wraps an AIJ matrix in a temporary hypre ParCSR WITHOUT copying the data:
   the ParCSR borrows the CSR arrays of A. Must be paired with
   MatAIJRestoreParCSR_Private, which detaches the arrays before destroying. */
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  ierr = PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray =
 NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* with assumed partition, hypre wants only this rank's start offsets */
    ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
  /* the starts arrays are owned by the PETSc layouts, not by hypre */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part: borrow A's CSR arrays, no copy */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
  hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
  hypre_CSRMatrixData(hdiag)        = diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
    hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
    hypre_CSRMatrixData(hoffd)        = offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
  }
  *hA = tA;
  PetscFunctionReturn(0);
}

/* Detaches the borrowed CSR arrays and destroys the temporary ParCSR created
   by MatAIJGetParCSR_Private; the PETSc matrix keeps ownership of its data. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;

  PetscFunctionBegin;
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* set pointers to NULL before destroying tA */
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  hypre_CSRMatrixI(hoffd)    = NULL;
  hypre_CSRMatrixJ(hoffd)    = NULL;
  hypre_CSRMatrixData(hoffd) = NULL;
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
  PetscErrorCode ierr;
  HYPRE_Int      P_owns_col_starts,R_owns_row_starts;

  PetscFunctionBegin;
  /* remember ownership flags so they can be restored afterwards */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
  PetscFunctionReturn(0);
}

/* Computes C = P^T*A*P for AIJ matrices by wrapping them as hypre ParCSRs
   and calling BoomerAMG's RAP kernel. */
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(P,&hP);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(P,&hP);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode
 MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* no symbolic work is needed: hypre's RAP computes the pattern itself,
     so this only creates C and installs the numeric phase */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
  ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr);
  (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  PetscFunctionReturn(0);
}

/* Computes C = P^T*A*P where A is AIJ and P is MATHYPRE (ParCSR). */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr);

  /* create temporary matrix and merge to C */
  ierr = MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver for AIJ A and MATHYPRE P; the output type of C is selectable
   via the -matptap_hypre_outtype option (default MATAIJ). */
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    ierr = PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    if (flg) {
      ierr = MatSetType(*C,type);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(*C,deft);CHKERRQ(ierr);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Computes C = P^T*A*P where both A and P are MATHYPRE (ParCSR). */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  ierr = MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);CHKERRQ(ierr);
  ierr = MatHeaderMerge(C,&B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PtAP driver for MATHYPRE A and P. */
static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr);
    ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr);
    (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
    ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/* Computes C = A*B for AIJ matrices using hypre's ParCSR matrix product. */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr);
  ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr);
  ierr = MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);CHKERRQ(ierr);
  ierr = MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);CHKERRQ(ierr);
  ierr
= MatHeaderMerge(C,&D);CHKERRQ(ierr); 794 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 795 ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr); 796 PetscFunctionReturn(0); 797 } 798 799 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C) 800 { 801 PetscErrorCode ierr; 802 803 PetscFunctionBegin; 804 ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr); 805 ierr = MatSetType(*C,MATAIJ);CHKERRQ(ierr); 806 (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE; 807 PetscFunctionReturn(0); 808 } 809 810 static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C) 811 { 812 Mat D; 813 hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL; 814 Mat_HYPRE *hA,*hB; 815 PetscBool ishypre; 816 HYPRE_Int type; 817 PetscErrorCode ierr; 818 819 PetscFunctionBegin; 820 ierr = PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);CHKERRQ(ierr); 821 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE); 822 ierr = PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);CHKERRQ(ierr); 823 if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE); 824 hA = (Mat_HYPRE*)A->data; 825 hB = (Mat_HYPRE*)B->data; 826 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 827 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 828 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type)); 829 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported"); 830 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr)); 831 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr)); 832 ierr = MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);CHKERRQ(ierr); 833 ierr = 
MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);CHKERRQ(ierr); 834 /* need to use HeaderReplace because HeaderMerge messes up with the communicator */ 835 ierr = MatHeaderReplace(C,&D);CHKERRQ(ierr); 836 C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE; 837 PetscFunctionReturn(0); 838 } 839 840 static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 841 { 842 PetscErrorCode ierr; 843 844 PetscFunctionBegin; 845 if (scall == MAT_INITIAL_MATRIX) { 846 ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr); 847 ierr = MatCreate(PetscObjectComm((PetscObject)A),C);CHKERRQ(ierr); 848 ierr = MatSetType(*C,MATHYPRE);CHKERRQ(ierr); 849 (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE; 850 ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr); 851 } 852 ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr); 853 ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr); 854 ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr); 855 PetscFunctionReturn(0); 856 } 857 858 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D) 859 { 860 Mat E; 861 hypre_ParCSRMatrix *hA,*hB,*hC,*hABC; 862 PetscErrorCode ierr; 863 864 PetscFunctionBegin; 865 ierr = MatAIJGetParCSR_Private(A,&hA);CHKERRQ(ierr); 866 ierr = MatAIJGetParCSR_Private(B,&hB);CHKERRQ(ierr); 867 ierr = MatAIJGetParCSR_Private(C,&hC);CHKERRQ(ierr); 868 ierr = MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);CHKERRQ(ierr); 869 ierr = MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);CHKERRQ(ierr); 870 ierr = MatHeaderMerge(D,&E);CHKERRQ(ierr); 871 ierr = MatAIJRestoreParCSR_Private(A,&hA);CHKERRQ(ierr); 872 ierr = MatAIJRestoreParCSR_Private(B,&hB);CHKERRQ(ierr); 873 ierr = MatAIJRestoreParCSR_Private(C,&hC);CHKERRQ(ierr); 874 PetscFunctionReturn(0); 875 } 876 877 PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal 
fill,Mat *D) 878 { 879 PetscErrorCode ierr; 880 881 PetscFunctionBegin; 882 ierr = MatCreate(PetscObjectComm((PetscObject)A),D);CHKERRQ(ierr); 883 ierr = MatSetType(*D,MATAIJ);CHKERRQ(ierr); 884 PetscFunctionReturn(0); 885 } 886 887 static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y) 888 { 889 PetscErrorCode ierr; 890 891 PetscFunctionBegin; 892 ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);CHKERRQ(ierr); 893 PetscFunctionReturn(0); 894 } 895 896 static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y) 897 { 898 PetscErrorCode ierr; 899 900 PetscFunctionBegin; 901 ierr = MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);CHKERRQ(ierr); 902 PetscFunctionReturn(0); 903 } 904 905 static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans) 906 { 907 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 908 hypre_ParCSRMatrix *parcsr; 909 hypre_ParVector *hx,*hy; 910 PetscScalar *ax,*ay,*sax,*say; 911 PetscErrorCode ierr; 912 913 PetscFunctionBegin; 914 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr)); 915 PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx)); 916 PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy)); 917 ierr = VecGetArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr); 918 ierr = VecGetArray(y,&ay);CHKERRQ(ierr); 919 if (trans) { 920 VecHYPRE_ParVectorReplacePointer(hA->x,ay,say); 921 VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax); 922 hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx); 923 VecHYPRE_ParVectorReplacePointer(hA->x,say,ay); 924 VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax); 925 } else { 926 VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax); 927 VecHYPRE_ParVectorReplacePointer(hA->b,ay,say); 928 hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy); 929 VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax); 930 VecHYPRE_ParVectorReplacePointer(hA->b,say,ay); 931 } 932 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&ax);CHKERRQ(ierr); 933 ierr = 
VecRestoreArray(y,&ay);CHKERRQ(ierr); 934 PetscFunctionReturn(0); 935 } 936 937 static PetscErrorCode MatDestroy_HYPRE(Mat A) 938 { 939 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 940 PetscErrorCode ierr; 941 942 PetscFunctionBegin; 943 if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x)); 944 if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b)); 945 if (hA->ij) { 946 if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL; 947 PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij)); 948 } 949 if (hA->comm) { ierr = MPI_Comm_free(&hA->comm);CHKERRQ(ierr); } 950 ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);CHKERRQ(ierr); 951 ierr = PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);CHKERRQ(ierr); 952 ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);CHKERRQ(ierr); 953 ierr = PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);CHKERRQ(ierr); 954 ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);CHKERRQ(ierr); 955 ierr = PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);CHKERRQ(ierr); 956 ierr = PetscFree(A->data);CHKERRQ(ierr); 957 PetscFunctionReturn(0); 958 } 959 960 static PetscErrorCode MatSetUp_HYPRE(Mat A) 961 { 962 PetscErrorCode ierr; 963 964 PetscFunctionBegin; 965 ierr = MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr); 966 PetscFunctionReturn(0); 967 } 968 969 static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode) 970 { 971 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 972 Vec x,b; 973 PetscErrorCode ierr; 974 975 PetscFunctionBegin; 976 if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE"); 977 PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij)); 978 if (hA->x) PetscFunctionReturn(0); 979 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 980 ierr = 
PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 981 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);CHKERRQ(ierr); 982 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);CHKERRQ(ierr); 983 ierr = VecHYPRE_IJVectorCreate(x,&hA->x);CHKERRQ(ierr); 984 ierr = VecHYPRE_IJVectorCreate(b,&hA->b);CHKERRQ(ierr); 985 ierr = VecDestroy(&x);CHKERRQ(ierr); 986 ierr = VecDestroy(&b);CHKERRQ(ierr); 987 PetscFunctionReturn(0); 988 } 989 990 #define MATHYPRE_SCRATCH 2048 991 992 PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins) 993 { 994 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 995 PetscScalar *vals = (PetscScalar *)v; 996 PetscScalar sscr[MATHYPRE_SCRATCH]; 997 HYPRE_Int cscr[2][MATHYPRE_SCRATCH]; 998 HYPRE_Int i,nzc; 999 PetscErrorCode ierr; 1000 1001 PetscFunctionBegin; 1002 for (i=0,nzc=0;i<nc;i++) { 1003 if (cols[i] >= 0) { 1004 cscr[0][nzc ] = cols[i]; 1005 cscr[1][nzc++] = i; 1006 } 1007 } 1008 if (!nzc) PetscFunctionReturn(0); 1009 1010 if (ins == ADD_VALUES) { 1011 for (i=0;i<nr;i++) { 1012 if (rows[i] >= 0 && nzc) { 1013 PetscInt j; 1014 for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]]; 1015 PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr)); 1016 } 1017 vals += nc; 1018 } 1019 } else { /* INSERT_VALUES */ 1020 #if defined(PETSC_USE_DEBUG) 1021 /* Insert values cannot be used to insert offproc entries */ 1022 PetscInt rst,ren; 1023 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1024 for (i=0;i<nr;i++) 1025 if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. 
Use ADD_VALUES instead"); 1026 #endif 1027 for (i=0;i<nr;i++) { 1028 if (rows[i] >= 0 && nzc) { 1029 PetscInt j; 1030 for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]]; 1031 PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr)); 1032 } 1033 vals += nc; 1034 } 1035 } 1036 PetscFunctionReturn(0); 1037 } 1038 1039 static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1040 { 1041 Mat_HYPRE *hA = (Mat_HYPRE*)A->data; 1042 HYPRE_Int *hdnnz,*honnz; 1043 PetscInt i,rs,re,cs,ce,bs; 1044 PetscMPIInt size; 1045 PetscErrorCode ierr; 1046 1047 PetscFunctionBegin; 1048 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1049 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1050 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1051 rs = A->rmap->rstart; 1052 re = A->rmap->rend; 1053 cs = A->cmap->rstart; 1054 ce = A->cmap->rend; 1055 if (!hA->ij) { 1056 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij)); 1057 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 1058 } else { 1059 HYPRE_Int hrs,hre,hcs,hce; 1060 PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce)); 1061 if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re); 1062 if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce); 1063 } 1064 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1065 1066 if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs; 1067 if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs; 1068 1069 if (!dnnz) { 1070 ierr = PetscMalloc1(A->rmap->n,&hdnnz);CHKERRQ(ierr); 1071 for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz; 1072 } else { 1073 hdnnz = (HYPRE_Int*)dnnz; 1074 } 1075 ierr = 
MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 1076 if (size > 1) { 1077 if (!onnz) { 1078 ierr = PetscMalloc1(A->rmap->n,&honnz);CHKERRQ(ierr); 1079 for (i=0;i<A->rmap->n;i++) honnz[i] = onz; 1080 } else { 1081 honnz = (HYPRE_Int*)onnz; 1082 } 1083 PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz)); 1084 } else { 1085 honnz = NULL; 1086 PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz)); 1087 } 1088 if (!dnnz) { 1089 ierr = PetscFree(hdnnz);CHKERRQ(ierr); 1090 } 1091 if (!onnz && honnz) { 1092 ierr = PetscFree(honnz);CHKERRQ(ierr); 1093 } 1094 A->preallocated = PETSC_TRUE; 1095 1096 /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */ 1097 { 1098 hypre_AuxParCSRMatrix *aux_matrix; 1099 aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij); 1100 hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; 1101 } 1102 PetscFunctionReturn(0); 1103 } 1104 1105 /*@C 1106 MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format 1107 1108 Collective on Mat 1109 1110 Input Parameters: 1111 + A - the matrix 1112 . dnz - number of nonzeros per row in DIAGONAL portion of local submatrix 1113 (same value is used for all local rows) 1114 . dnnz - array containing the number of nonzeros in the various rows of the 1115 DIAGONAL portion of the local submatrix (possibly different for each row) 1116 or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure. 1117 The size of this array is equal to the number of local rows, i.e 'm'. 1118 For matrices that will be factored, you must leave room for (and set) 1119 the diagonal entry even if it is zero. 1120 . onz - number of nonzeros per row in the OFF-DIAGONAL portion of local 1121 submatrix (same value is used for all local rows). 
1122 - onnz - array containing the number of nonzeros in the various rows of the 1123 OFF-DIAGONAL portion of the local submatrix (possibly different for 1124 each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero 1125 structure. The size of this array is equal to the number 1126 of local rows, i.e 'm'. 1127 1128 Notes: 1129 If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored. 1130 1131 Level: intermediate 1132 1133 .keywords: matrix, aij, compressed row, sparse, parallel 1134 1135 .seealso: MatCreate(), MatMPIAIJSetPreallocation, MATHYPRE 1136 @*/ 1137 PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[]) 1138 { 1139 PetscErrorCode ierr; 1140 1141 PetscFunctionBegin; 1142 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1143 PetscValidType(A,1); 1144 ierr = PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));CHKERRQ(ierr); 1145 PetscFunctionReturn(0); 1146 } 1147 1148 /* 1149 MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix 1150 1151 Collective 1152 1153 Input Parameters: 1154 + vparcsr - the pointer to the hypre_ParCSRMatrix 1155 . mtype - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported. 1156 - copymode - PETSc copying options 1157 1158 Output Parameter: 1159 . 
A - the matrix 1160 1161 Level: intermediate 1162 1163 .seealso: MatHYPRE, PetscCopyMode 1164 */ 1165 PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A) 1166 { 1167 Mat T; 1168 Mat_HYPRE *hA; 1169 hypre_ParCSRMatrix *parcsr; 1170 MPI_Comm comm; 1171 PetscInt rstart,rend,cstart,cend,M,N; 1172 PetscBool isseqaij,ismpiaij,isaij,ishyp,isis; 1173 PetscErrorCode ierr; 1174 1175 PetscFunctionBegin; 1176 parcsr = (hypre_ParCSRMatrix *)vparcsr; 1177 comm = hypre_ParCSRMatrixComm(parcsr); 1178 ierr = PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 1179 ierr = PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr); 1180 ierr = PetscStrcmp(mtype,MATAIJ,&isaij);CHKERRQ(ierr); 1181 ierr = PetscStrcmp(mtype,MATHYPRE,&ishyp);CHKERRQ(ierr); 1182 ierr = PetscStrcmp(mtype,MATIS,&isis);CHKERRQ(ierr); 1183 isaij = (PetscBool)(isseqaij || ismpiaij || isaij); 1184 if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! 
Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE); 1185 if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES"); 1186 1187 /* access ParCSRMatrix */ 1188 rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr); 1189 rend = hypre_ParCSRMatrixLastRowIndex(parcsr); 1190 cstart = hypre_ParCSRMatrixFirstColDiag(parcsr); 1191 cend = hypre_ParCSRMatrixLastColDiag(parcsr); 1192 M = hypre_ParCSRMatrixGlobalNumRows(parcsr); 1193 N = hypre_ParCSRMatrixGlobalNumCols(parcsr); 1194 1195 /* fix for empty local rows/columns */ 1196 if (rend < rstart) rend = rstart; 1197 if (cend < cstart) cend = cstart; 1198 1199 /* create PETSc matrix with MatHYPRE */ 1200 ierr = MatCreate(comm,&T);CHKERRQ(ierr); 1201 ierr = MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);CHKERRQ(ierr); 1202 ierr = MatSetType(T,MATHYPRE);CHKERRQ(ierr); 1203 hA = (Mat_HYPRE*)(T->data); 1204 1205 /* create HYPRE_IJMatrix */ 1206 PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij)); 1207 1208 /* set ParCSR object */ 1209 PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR)); 1210 hypre_IJMatrixObject(hA->ij) = parcsr; 1211 T->preallocated = PETSC_TRUE; 1212 1213 /* set assembled flag */ 1214 hypre_IJMatrixAssembleFlag(hA->ij) = 1; 1215 PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij)); 1216 if (ishyp) { 1217 PetscMPIInt myid = 0; 1218 1219 /* make sure we always have row_starts and col_starts available */ 1220 if (HYPRE_AssumedPartitionCheck()) { 1221 ierr = MPI_Comm_rank(comm,&myid);CHKERRQ(ierr); 1222 } 1223 if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) { 1224 PetscLayout map; 1225 1226 ierr = MatGetLayouts(T,NULL,&map);CHKERRQ(ierr); 1227 ierr = PetscLayoutSetUp(map);CHKERRQ(ierr); 1228 hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid); 1229 } 1230 if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) { 1231 PetscLayout map; 1232 1233 ierr = 
MatGetLayouts(T,&map,NULL);CHKERRQ(ierr); 1234 ierr = PetscLayoutSetUp(map);CHKERRQ(ierr); 1235 hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid); 1236 } 1237 /* prevent from freeing the pointer */ 1238 if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE; 1239 *A = T; 1240 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1241 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1242 } else if (isaij) { 1243 if (copymode != PETSC_OWN_POINTER) { 1244 /* prevent from freeing the pointer */ 1245 hA->inner_free = PETSC_FALSE; 1246 ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr); 1247 ierr = MatDestroy(&T);CHKERRQ(ierr); 1248 } else { /* AIJ return type with PETSC_OWN_POINTER */ 1249 ierr = MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);CHKERRQ(ierr); 1250 *A = T; 1251 } 1252 } else if (isis) { 1253 ierr = MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);CHKERRQ(ierr); 1254 if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE; 1255 ierr = MatDestroy(&T);CHKERRQ(ierr); 1256 } 1257 PetscFunctionReturn(0); 1258 } 1259 1260 static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr) 1261 { 1262 Mat_HYPRE* hA = (Mat_HYPRE*)A->data; 1263 HYPRE_Int type; 1264 PetscErrorCode ierr; 1265 1266 PetscFunctionBegin; 1267 if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present"); 1268 PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type)); 1269 if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR"); 1270 PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr)); 1271 PetscFunctionReturn(0); 1272 } 1273 1274 /* 1275 MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix 1276 1277 Not collective 1278 1279 Input Parameters: 1280 + A - the MATHYPRE object 1281 1282 Output Parameter: 1283 . 
parcsr - the pointer to the hypre_ParCSRMatrix 1284 1285 Level: intermediate 1286 1287 .seealso: MatHYPRE, PetscCopyMode 1288 */ 1289 PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr) 1290 { 1291 PetscErrorCode ierr; 1292 1293 PetscFunctionBegin; 1294 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1295 PetscValidType(A,1); 1296 ierr = PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));CHKERRQ(ierr); 1297 PetscFunctionReturn(0); 1298 } 1299 1300 static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd) 1301 { 1302 hypre_ParCSRMatrix *parcsr; 1303 hypre_CSRMatrix *ha; 1304 PetscInt rst; 1305 PetscErrorCode ierr; 1306 1307 PetscFunctionBegin; 1308 if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks"); 1309 ierr = MatGetOwnershipRange(A,&rst,NULL);CHKERRQ(ierr); 1310 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1311 if (missing) *missing = PETSC_FALSE; 1312 if (dd) *dd = -1; 1313 ha = hypre_ParCSRMatrixDiag(parcsr); 1314 if (ha) { 1315 PetscInt size,i; 1316 HYPRE_Int *ii,*jj; 1317 1318 size = hypre_CSRMatrixNumRows(ha); 1319 ii = hypre_CSRMatrixI(ha); 1320 jj = hypre_CSRMatrixJ(ha); 1321 for (i = 0; i < size; i++) { 1322 PetscInt j; 1323 PetscBool found = PETSC_FALSE; 1324 1325 for (j = ii[i]; j < ii[i+1] && !found; j++) 1326 found = (jj[j] == i) ? 
PETSC_TRUE : PETSC_FALSE; 1327 1328 if (!found) { 1329 PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i); 1330 if (missing) *missing = PETSC_TRUE; 1331 if (dd) *dd = i+rst; 1332 PetscFunctionReturn(0); 1333 } 1334 } 1335 if (!size) { 1336 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1337 if (missing) *missing = PETSC_TRUE; 1338 if (dd) *dd = rst; 1339 } 1340 } else { 1341 PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n"); 1342 if (missing) *missing = PETSC_TRUE; 1343 if (dd) *dd = rst; 1344 } 1345 PetscFunctionReturn(0); 1346 } 1347 1348 static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s) 1349 { 1350 hypre_ParCSRMatrix *parcsr; 1351 hypre_CSRMatrix *ha; 1352 PetscErrorCode ierr; 1353 1354 PetscFunctionBegin; 1355 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1356 /* diagonal part */ 1357 ha = hypre_ParCSRMatrixDiag(parcsr); 1358 if (ha) { 1359 PetscInt size,i; 1360 HYPRE_Int *ii; 1361 PetscScalar *a; 1362 1363 size = hypre_CSRMatrixNumRows(ha); 1364 a = hypre_CSRMatrixData(ha); 1365 ii = hypre_CSRMatrixI(ha); 1366 for (i = 0; i < ii[size]; i++) a[i] *= s; 1367 } 1368 /* offdiagonal part */ 1369 ha = hypre_ParCSRMatrixOffd(parcsr); 1370 if (ha) { 1371 PetscInt size,i; 1372 HYPRE_Int *ii; 1373 PetscScalar *a; 1374 1375 size = hypre_CSRMatrixNumRows(ha); 1376 a = hypre_CSRMatrixData(ha); 1377 ii = hypre_CSRMatrixI(ha); 1378 for (i = 0; i < ii[size]; i++) a[i] *= s; 1379 } 1380 PetscFunctionReturn(0); 1381 } 1382 1383 static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 1384 { 1385 hypre_ParCSRMatrix *parcsr; 1386 HYPRE_Int *lrows; 1387 PetscInt rst,ren,i; 1388 PetscErrorCode ierr; 1389 1390 PetscFunctionBegin; 1391 if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented"); 1392 ierr = MatHYPREGetParCSR_HYPRE(A,&parcsr);CHKERRQ(ierr); 1393 ierr = 
PetscMalloc1(numRows,&lrows);CHKERRQ(ierr); 1394 ierr = MatGetOwnershipRange(A,&rst,&ren);CHKERRQ(ierr); 1395 for (i=0;i<numRows;i++) { 1396 if (rows[i] < rst || rows[i] >= ren) 1397 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported"); 1398 lrows[i] = rows[i] - rst; 1399 } 1400 PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows)); 1401 ierr = PetscFree(lrows);CHKERRQ(ierr); 1402 PetscFunctionReturn(0); 1403 } 1404 1405 /*MC 1406 MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices 1407 based on the hypre IJ interface. 1408 1409 Level: intermediate 1410 1411 .seealso: MatCreate() 1412 M*/ 1413 1414 PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B) 1415 { 1416 Mat_HYPRE *hB; 1417 PetscErrorCode ierr; 1418 1419 PetscFunctionBegin; 1420 ierr = PetscNewLog(B,&hB);CHKERRQ(ierr); 1421 hB->inner_free = PETSC_TRUE; 1422 1423 B->data = (void*)hB; 1424 B->rmap->bs = 1; 1425 B->assembled = PETSC_FALSE; 1426 1427 ierr = PetscMemzero(B->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 1428 B->ops->mult = MatMult_HYPRE; 1429 B->ops->multtranspose = MatMultTranspose_HYPRE; 1430 B->ops->setup = MatSetUp_HYPRE; 1431 B->ops->destroy = MatDestroy_HYPRE; 1432 B->ops->assemblyend = MatAssemblyEnd_HYPRE; 1433 B->ops->ptap = MatPtAP_HYPRE_HYPRE; 1434 B->ops->matmult = MatMatMult_HYPRE_HYPRE; 1435 B->ops->setvalues = MatSetValues_HYPRE; 1436 B->ops->missingdiagonal = MatMissingDiagonal_HYPRE; 1437 B->ops->scale = MatScale_HYPRE; 1438 B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE; 1439 1440 ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);CHKERRQ(ierr); 1441 ierr = PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);CHKERRQ(ierr); 1442 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);CHKERRQ(ierr); 1443 ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);CHKERRQ(ierr); 1444 ierr = 
PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 1445 ierr = PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);CHKERRQ(ierr); 1446 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);CHKERRQ(ierr); 1447 ierr = PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);CHKERRQ(ierr); 1448 PetscFunctionReturn(0); 1449 } 1450 1451 static PetscErrorCode hypre_array_destroy(void *ptr) 1452 { 1453 PetscFunctionBegin; 1454 hypre_TFree(ptr,HYPRE_MEMORY_HOST); 1455 PetscFunctionReturn(0); 1456 } 1457