#define PETSCMAT_DLL

/*
   This provides a matrix that consists of Mats
*/

#include "private/matimpl.h"                  /*I "petscmat.h" I*/
#include "../src/mat/impls/baij/seq/baij.h"   /* use the common AIJ data-structure */
#include "petscksp.h"

#define CHUNKSIZE 15

typedef struct {
  SEQAIJHEADER(Mat);
  SEQBAIJHEADER;
  Mat *diags;

  Vec left,right,middle,workb;                /* dummy vectors to perform local parts of product */
} Mat_BlockMat;

#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat_Symmetric"
PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscScalar       *x;
  const Mat         *v = a->a;
  const PetscScalar *b;
  PetscErrorCode    ierr;
  PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt    *idx;
  IS                row,col;
  MatFactorInfo     info;
  Vec               left = a->left,right = a->right,middle = a->middle;
  Mat               *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");

  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]],MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      /* the factor matrix must exist before the symbolic factorization can fill it */
      ierr = MatGetFactor(a->a[a->diag[i]],MAT_SOLVER_PETSC,MAT_FACTOR_CHOLESKY,&a->diags[i]);CHKERRQ(ierr);
      ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  ierr = VecGetArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        n   = a->i[i+1] - a->i[i] - 1;
        idx = a->j + a->i[i] + 1;
        v   = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);

        /* now adjust right hand side, see MatSOR_SeqSBAIJ */
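        /* only the upper triangle is stored, so the just-computed x_i must be folded
           into the right-hand sides of later block rows: b_{idx[j]} -= A_{i,idx[j]}^T x_i */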
        for (j=0; j<n; j++) {
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n   = a->i[i+1] - a->i[i] - 1;
        idx = a->j + a->i[i] + 1;
        v   = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat"
PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscScalar       *x;
  const Mat         *v = a->a;
  const PetscScalar *b;
  PetscErrorCode    ierr;
  PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt    *idx;
  IS                row,col;
  MatFactorInfo     info;
  Vec               left = a->left,right = a->right;
  Mat               *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");

  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]],MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      /* the factor matrix must exist before the symbolic factorization can fill it */
      ierr = MatGetFactor(a->a[a->diag[i]],MAT_SOLVER_PETSC,MAT_FACTOR_LU,&a->diags[i]);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        n   = a->i[i+1] - a->i[i];
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

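        /* left now holds b_i - sum_{j != i} A_{i,j} x_j; solve with the factored
           diagonal block, writing x_i directly into the x array */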
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n   = a->i[i+1] - a->i[i];
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscTruth     roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];
    ap   = aa + ai[brow];
    rmax = imax[brow];
    nrow = ailen[brow];
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue;
      ridx = row % bs; cidx = col % bs;
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;
        }
      }
      if (nonew == 1) goto noinsert1;
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix",row,col);
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N >= i) ap[i] = 0;
      rp[i] = bcol;
      a->nz++;
      noinsert1:;
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
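      /* delegate the scalar insertion to entry (ridx,cidx) of the bs by bs block */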
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low  = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* provided by the SeqAIJ implementation; declared here on the assumption that no included header exposes it */
extern PetscErrorCode MatLoad_SeqAIJ(PetscViewer,const MatType,Mat*);

#undef __FUNCT__
#define __FUNCT__ "MatLoad_BlockMat"
PetscErrorCode MatLoad_BlockMat(PetscViewer viewer,const MatType type,Mat *A)
{
  PetscErrorCode    ierr;
  Mat               tmpA;
  PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
  const PetscInt    *cols;
  const PetscScalar *values;
  PetscTruth        flg = PETSC_FALSE,notdone;
  Mat_SeqAIJ        *a;
  Mat_BlockMat      *amat;

  PetscFunctionBegin;
  ierr = MatLoad_SeqAIJ(viewer,MATSEQAIJ,&tmpA);CHKERRQ(ierr);

  ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTruth("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* Determine number of nonzero blocks for each block row */
  a   = (Mat_SeqAIJ*)tmpA->data;
  mbs = m/bs;
  ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
  ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<mbs; i++) {
    for (j=0; j<bs; j++) {
      ii[j]    = a->j + a->i[i*bs + j];
      ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = -1;
    notdone    = PETSC_TRUE;
    while (PETSC_TRUE) {
      notdone = PETSC_FALSE;
      nextcol = 1000000000;
      for (j=0; j<bs; j++) {
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
          ii[j]++;
          ilens[j]--;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (!notdone) break;
      if (!flg || (nextcol >= i)) lens[i]++;
      currentcol = nextcol;
    }
  }

  ierr = MatCreateBlockMat(PETSC_COMM_SELF,m,n,bs,0,lens,A);CHKERRQ(ierr);
  if (flg) {
    ierr = MatSetOption(*A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  }
  amat = (Mat_BlockMat*)(*A)->data;

  /* preallocate the submatrices */
  ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
  for (i=0; i<mbs; i++) { /* loops for block rows */
    for (j=0; j<bs; j++) {
      ii[j]    = a->j + a->i[i*bs + j];
      ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = 1000000000;
    for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
      if (ilens[j] > 0) {
        currentcol = PetscMin(currentcol,ii[j][0]/bs);
      }
    }

    notdone = PETSC_TRUE;
    while (PETSC_TRUE) { /* loops over blocks in block row */

      notdone = PETSC_FALSE;
      nextcol = 1000000000;
      ierr = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
      for (j=0; j<bs; j++) { /* loop over rows in block */
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
          ii[j]++;
          ilens[j]--;
          llens[j]++;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
      if (!flg || currentcol >= i) {
        amat->j[cnt] = currentcol;
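        /* create the block at block column currentcol; llens[] holds the exact
           number of nonzeros in each of its bs rows */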
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
      }

      if (!notdone) break;
      currentcol = nextcol;
    }
    amat->ilen[i] = lens[i];
  }
  CHKMEMQ;

  ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
  ierr = PetscFree(llens);CHKERRQ(ierr);

  /* copy over the matrix, one row at a time */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
    ierr = MatSetValues(*A,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  ierr = MatDestroy(tmpA);CHKERRQ(ierr); /* the intermediate AIJ matrix is no longer needed */
  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_BlockMat"
PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscErrorCode    ierr;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
    ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
    if (A->symmetric) {
      ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_BlockMat"
PetscErrorCode MatDestroy_BlockMat(Mat mat)
{
  PetscErrorCode ierr;
  Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
  PetscInt       i;

  PetscFunctionBegin;
  if (bmat->right) {
    ierr = VecDestroy(bmat->right);CHKERRQ(ierr);
  }
  if (bmat->left) {
    ierr = VecDestroy(bmat->left);CHKERRQ(ierr);
  }
  if (bmat->middle) {
    ierr = VecDestroy(bmat->middle);CHKERRQ(ierr);
  }
  if (bmat->workb) {
    ierr = VecDestroy(bmat->workb);CHKERRQ(ierr);
  }
  if (bmat->diags) {
    for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
      if (bmat->diags[i]) {ierr = MatDestroy(bmat->diags[i]);CHKERRQ(ierr);}
    }
    ierr = PetscFree(bmat->diags);CHKERRQ(ierr); /* the array of factored diagonal blocks itself */
  }
  if (bmat->a) {
    for (i=0; i<bmat->nz; i++) {
      if (bmat->a[i]) {ierr = MatDestroy(bmat->a[i]);CHKERRQ(ierr);}
    }
  }
  ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
  ierr = PetscFree(bmat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat"
PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj = bmat->j;
  aa = bmat->a;
  ii = bmat->i;
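  /* y_i += A_{i,aj[jrow]} x_{aj[jrow]}: alias windows of the x and y arrays into the
     work vectors and apply each block with MatMultAdd() */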
  for (i=0; i<m; i++) {
    jrow = ii[i];
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    n    = ii[i+1] - jrow;
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj = bmat->j;
  aa = bmat->a;
  ii = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);           /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);           /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

/* the three operations below are unimplemented placeholders: they return
   immediately without computing anything */
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
/*
   Adds diagonal pointers to sparse matrix structure: a->diag[i] is set to the
   slot of the diagonal block in block row i, or to a->i[i+1] if the row has none.
*/
#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_BlockMat"
PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;

  PetscFunctionBegin;
  if (!a->diag) {
    ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  }
  for (i=0; i<mbs; i++) {
    a->diag[i] = a->i[i+1];
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_BlockMat"
PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  Mat_SeqAIJ     *c;
  PetscErrorCode ierr;
  PetscInt       i,k,first,step,lensi,nrows,ncols;
  PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
  PetscScalar    *a_new;
  Mat            C,*aa = a->a;
  PetscTruth     stride,equal;

  PetscFunctionBegin;
  ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
  if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for identical column and row indices");
  ierr = ISStride(iscol,&stride);CHKERRQ(ierr);
  if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
  ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
  if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");

  ierr  = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
  ncols = nrows;

  /* create submatrix */
  if (scall == MAT_REUSE_MATRIX) {
    PetscInt n_cols,n_rows;
    C = *B;
    ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
    if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
    ierr = MatZeroEntries(C);CHKERRQ(ierr);
  } else {
    ierr = MatCreate(((PetscObject)A)->comm,&C);CHKERRQ(ierr);
    ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    if (A->symmetric) {
      ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
    }
    ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
    ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
  }
  c = (Mat_SeqAIJ*)C->data;

  /* loop over rows inserting into submatrix */
  a_new = c->a;
  j_new = c->j;
  i_new = c->i;

  for (i=0; i<nrows; i++) {
    lensi = ailen[i];
    for (k=0; k<lensi; k++) {
      *j_new++ = *aj++;
      ierr = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
    }
    i_new[i+1] = i_new[i] + lensi;
    c->ilen[i] = lensi;
  }

  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *B = C;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

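  /* compress out the unused preallocated space, as in MatAssemblyEnd_SeqAIJ: slide each
     row's entries left by the slack (fshift) accumulated in the rows before it */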
  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it */
    fshift += imax[i-1] - ailen[i-1];
    rmax    = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]   = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded, %D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  a->reallocs         = 0;
  A->info.nz_unneeded = (double)fshift;
  a->rmax             = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_BlockMat"
PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscTruth flg)
{
  PetscFunctionBegin;
  if (opt == MAT_SYMMETRIC && flg) {
    A->ops->sor  = MatSOR_BlockMat_Symmetric;
    A->ops->mult = MatMult_BlockMat_Symmetric;
  } else {
    PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
  }
  PetscFunctionReturn(0);
}

static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
       0,
       0,
       MatMult_BlockMat,
/* 4*/ MatMultAdd_BlockMat,
       MatMultTranspose_BlockMat,
       MatMultTransposeAdd_BlockMat,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatSOR_BlockMat,
       0,
/*15*/ 0,
       0,
       0,
       0,
       0,
/*20*/ 0,
       MatAssemblyEnd_BlockMat,
       MatSetOption_BlockMat,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ MatGetSubMatrix_BlockMat,
       MatDestroy_BlockMat,
       MatView_BlockMat,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_BlockMat,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};

#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzeros per block row (same for all rows)
-  nnz - array containing the number of nonzeros in the various block rows
         (possibly different for each row) or PETSC_NULL

   Notes:
     If nnz is given then nz is ignored.

     Specify the preallocated storage with either nz or nnz (not both).
     Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
     allocation.  For large problems you MUST preallocate memory or you
     will get TERRIBLE performance, see the users' manual chapter on matrices.

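   Example (a minimal sketch; the sizes and per-block-row counts are illustrative):
.vb
      Mat      A;
      PetscInt nnz[2] = {2,2};     /* expected nonzero blocks in each of the 2 block rows */
      MatCreate(PETSC_COMM_SELF,&A);
      MatSetSizes(A,10,10,PETSC_DETERMINE,PETSC_DETERMINE);
      MatSetType(A,MATBLOCKMAT);
      MatBlockMatSetPreallocation(A,5,0,nnz);   /* bs = 5, so a 10 by 10 matrix has 2 block rows */
.ve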

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[]);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatBlockMatSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,bs,nz,nnz);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (bs < 1) SETERRQ1(((PetscObject)A)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Block size given %D must be greater than zero",bs);
  if (A->rmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap->n);
  if (A->cmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap->n);
  if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
  if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
  if (nnz) {
    for (i=0; i<A->rmap->n/bs; i++) {
      if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
      if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
    }
  }
  A->rmap->bs = A->cmap->bs = bs;
  bmat->mbs   = A->rmap->n/bs;

  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);

  if (!bmat->imax) {
    ierr = PetscMalloc2(A->rmap->n,PetscInt,&bmat->imax,A->rmap->n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (nnz) {
    nz = 0;
    for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
      bmat->imax[i] = nnz[i];
      nz           += nnz[i];
    }
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
  }

  /* bmat->ilen will count nonzeros in each row so far. */
  for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0; }

  /* allocate the matrix space */
  ierr = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
  ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap->n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
  bmat->i[0] = 0;
  for (i=1; i<bmat->mbs+1; i++) {
    bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
  }
  bmat->singlemalloc = PETSC_TRUE;
  bmat->free_a       = PETSC_TRUE;
  bmat->free_ij      = PETSC_TRUE;

  bmat->nz            = 0;
  bmat->maxnz         = nz;
  A->info.nz_unneeded = (double)bmat->maxnz;

  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATBLOCKMAT - A matrix defined by a set of Mat objects; it represents a sparse
                 block matrix whose blocks are themselves (usually) sparse.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_BlockMat"
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_BlockMat(Mat A)
{
  Mat_BlockMat   *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
  A->data = (void*)b;
  ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);

  ierr = PetscLayoutSetBlockSize(A->rmap,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(A->cmap,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);

  A->assembled    = PETSC_TRUE;
  A->preallocated = PETSC_FALSE;
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
                                           "MatBlockMatSetPreallocation_BlockMat",
                                           MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix whose entries are stored as individual (usually sparse) Mat blocks

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n - number of columns
.  bs - size of each submatrix
.  nz - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzero blocks per block row if known (use PETSC_NULL otherwise)

   Output Parameter:
.  A - the matrix

   Level: intermediate

   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y.  Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.
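
   For example, a matrix with 4 block rows of 5 by 5 sparse blocks could be
   created and assembled as follows (a minimal sketch; the sizes and counts
   are illustrative):
.vb
      Mat      A;
      PetscInt lens[4] = {2,3,3,2};   /* expected nonzero blocks per block row */
      MatCreateBlockMat(PETSC_COMM_SELF,20,20,5,0,lens,&A);
      /* ... calls to MatSetValues() ... */
      MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve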

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}