/*
   This provides a matrix type (MATBLOCKMAT) whose nonzero entries are themselves Mats:
   a sparse block matrix whose (usually sparse) blocks are stored in a CSR-like structure
*/

#include <petsc-private/matimpl.h>              /*I "petscmat.h" I*/
#include <../src/mat/impls/baij/seq/baij.h>     /* use the common AIJ data-structure */

typedef struct {
  SEQAIJHEADER(Mat);
  SEQBAIJHEADER;
  Mat  *diags;

  Vec  left,right,middle,workb;                 /* dummy vectors to perform local parts of product */
} Mat_BlockMat;

extern PetscErrorCode MatBlockMatSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*);

#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat_Symmetric"
PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscScalar       *x;
  const Mat         *v = a->a;
  const PetscScalar *b;
  PetscErrorCode    ierr;
  PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt    *idx;
  IS                row,col;
  MatFactorInfo     info;
  Vec               left = a->left,right = a->right, middle = a->middle;
  Mat               *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");

  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(&row);CHKERRQ(ierr);
      ierr = ISDestroy(&col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  ierr = VecGetArrayRead(a->workb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        n   = a->i[i+1] - a->i[i] - 1;
        idx = a->j + a->i[i] + 1;
        v   = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);

        /* now adjust right hand side, see MatSOR_SeqSBAIJ */
        for (j=0; j<n; j++) {
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n   = a->i[i+1] - a->i[i] - 1;
        idx = a->j + a->i[i] + 1;
        v   = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(a->workb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat"
PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscScalar       *x;
  const Mat         *v = a->a;
  const PetscScalar *b;
  PetscErrorCode    ierr;
  PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt    *idx;
  IS                row,col;
  MatFactorInfo     info;
  Vec               left = a->left,right = a->right;
  Mat               *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");

  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(&row);CHKERRQ(ierr);
      ierr = ISDestroy(&col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        n   = a->i[i+1] - a->i[i];
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n   = a->i[i+1] - a->i[i];
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscBool      roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];
    ap   = aa + ai[brow];
    rmax = imax[brow];
    nrow = ailen[brow];
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue;
      ridx = row % bs; cidx = col % bs;
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;
        }
      }
      if (nonew == 1) goto noinsert1;
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;
      rp[i] = bcol;
      a->nz++;
      noinsert1:;
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low  = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatLoad_BlockMat"
PetscErrorCode MatLoad_BlockMat(Mat newmat, PetscViewer viewer)
{
  PetscErrorCode    ierr;
  Mat               tmpA;
  PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
  const PetscInt    *cols;
  const PetscScalar *values;
  PetscBool         flg = PETSC_FALSE,notdone;
  Mat_SeqAIJ        *a;
  Mat_BlockMat      *amat;

  PetscFunctionBegin;
  ierr = MatCreate(PETSC_COMM_SELF,&tmpA);CHKERRQ(ierr);
  ierr = MatSetType(tmpA,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatLoad_SeqAIJ(tmpA,viewer);CHKERRQ(ierr);

  ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* Determine number of nonzero blocks for each block row */
  a   = (Mat_SeqAIJ*) tmpA->data;
  mbs = m/bs;
  ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
  ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<mbs; i++) {
    for (j=0; j<bs; j++) {
      ii[j]    = a->j + a->i[i*bs + j];
      ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = -1;
    notdone    = PETSC_TRUE;
    while (PETSC_TRUE) {
      notdone = PETSC_FALSE;
      nextcol = 1000000000;
      for (j=0; j<bs; j++) {
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
          ii[j]++;
          ilens[j]--;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (!notdone) break;
      if (!flg || (nextcol >= i)) lens[i]++;
      currentcol = nextcol;
    }
  }

  if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) {
    ierr = MatSetSizes(newmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  }
  ierr = MatBlockMatSetPreallocation(newmat,bs,0,lens);CHKERRQ(ierr);
  if (flg) {
    ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  }
  amat = (Mat_BlockMat*)(newmat)->data;

  /* preallocate the submatrices */
  ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
  for (i=0; i<mbs; i++) { /* loops for block rows */
    for (j=0; j<bs; j++) {
      ii[j]    = a->j + a->i[i*bs + j];
      ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = 1000000000;
    for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
      if (ilens[j] > 0) {
        currentcol = PetscMin(currentcol,ii[j][0]/bs);
      }
    }

    notdone = PETSC_TRUE;
    while (PETSC_TRUE) { /* loops over blocks in block row */

      notdone = PETSC_FALSE;
      nextcol = 1000000000;
      ierr    = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
      for (j=0; j<bs; j++) { /* loop over rows in block */
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
          ii[j]++;
          ilens[j]--;
          llens[j]++;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
      if (!flg || currentcol >= i) {
        amat->j[cnt] = currentcol;
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
      }

      if (!notdone) break;
      currentcol = nextcol;
    }
    amat->ilen[i] = lens[i];
  }
  CHKMEMQ;

  ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
  ierr = PetscFree(llens);CHKERRQ(ierr);

  /* copy over the matrix, one row at a time */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
    ierr = MatSetValues(newmat,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_BlockMat"
PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscErrorCode    ierr;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
    ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
    if (A->symmetric) {
      ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_BlockMat"
PetscErrorCode MatDestroy_BlockMat(Mat mat)
{
  PetscErrorCode ierr;
  Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = VecDestroy(&bmat->right);CHKERRQ(ierr);
  ierr = VecDestroy(&bmat->left);CHKERRQ(ierr);
  ierr = VecDestroy(&bmat->middle);CHKERRQ(ierr);
  ierr = VecDestroy(&bmat->workb);CHKERRQ(ierr);
  if (bmat->diags) {
    for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
      ierr = MatDestroy(&bmat->diags[i]);CHKERRQ(ierr);
    }
  }
  if (bmat->a) {
    for (i=0; i<bmat->nz; i++) {
      ierr = MatDestroy(&bmat->a[i]);CHKERRQ(ierr);
    }
  }
  ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat"
PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj   = bmat->j;
  aa   = bmat->a;
  ii   = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    n    = ii[i+1] - jrow;
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj   = bmat->j;
  aa   = bmat->a;
  ii   = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
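/*
   Illustrative sketch (guarded out, not used by the routines above): the multiply and SOR
   kernels avoid copying by "windowing" bs-length slices of the x and y arrays onto the dummy
   work vectors with VecPlaceArray()/VecResetArray() and then applying the block Mat.  The
   helper below shows the same pattern for a single block; the names ApplyOneBlock, blk,
   xslice and yslice are hypothetical, and the work vectors are created and destroyed locally
   here only to keep the sketch self-contained (the implementation reuses bmat->left/right).
*/
#if 0
static PetscErrorCode ApplyOneBlock(Mat blk,PetscInt bs,const PetscScalar *xslice,PetscScalar *yslice)
{
  Vec            right,left;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,PETSC_NULL,&right);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,PETSC_NULL,&left);CHKERRQ(ierr);
  ierr = VecPlaceArray(right,xslice);CHKERRQ(ierr);        /* window onto the input slice  */
  ierr = VecPlaceArray(left,yslice);CHKERRQ(ierr);         /* window onto the output slice */
  ierr = MatMultAdd(blk,right,left,left);CHKERRQ(ierr);    /* y_i += A_ij * x_j            */
  ierr = VecResetArray(right);CHKERRQ(ierr);
  ierr = VecResetArray(left);CHKERRQ(ierr);
  ierr = VecDestroy(&right);CHKERRQ(ierr);
  ierr = VecDestroy(&left);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif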
/*
     Adds diagonal pointers to sparse matrix structure.
*/
#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_BlockMat"
PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;

  PetscFunctionBegin;
  if (!a->diag) {
    ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  }
  for (i=0; i<mbs; i++) {
    a->diag[i] = a->i[i+1];
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_BlockMat"
PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  Mat_SeqAIJ     *c;
  PetscErrorCode ierr;
  PetscInt       i,k,first,step,lensi,nrows,ncols;
  PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
  PetscScalar    *a_new;
  Mat            C,*aa = a->a;
  PetscBool      stride,equal;

  PetscFunctionBegin;
  ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
  if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for identical column and row indices");
  ierr = PetscObjectTypeCompare((PetscObject)iscol,ISSTRIDE,&stride);CHKERRQ(ierr);
  if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
  ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
  if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");

  ierr  = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
  ncols = nrows;

  /* create submatrix */
  if (scall == MAT_REUSE_MATRIX) {
    PetscInt n_cols,n_rows;
    C    = *B;
    ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
    if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
    ierr = MatZeroEntries(C);CHKERRQ(ierr);
  } else {
    ierr = MatCreate(((PetscObject)A)->comm,&C);CHKERRQ(ierr);
    ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    if (A->symmetric) {
      ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
    } else {
      ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
    }
    ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
    ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
  }
  c = (Mat_SeqAIJ*)C->data;

  /* loop over rows inserting into submatrix */
  a_new = c->a;
  j_new = c->j;
  i_new = c->i;

  for (i=0; i<nrows; i++) {
    lensi = ailen[i];
    for (k=0; k<lensi; k++) {
      *j_new++ = *aj++;
      ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
    }
    i_new[i+1] = i_new[i] + lensi;
    c->ilen[i] = lensi;
  }

  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *B   = C;
  PetscFunctionReturn(0);
}
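/*
   Illustrative sketch (guarded out): the routine above only supports extracting the scalar
   entry (first,first) of every block, selected by one stride IS with step equal to the block
   size, used for both rows and columns.  The helper and its names (ExtractEntryPerBlock, k,
   mbs, bs, S) are hypothetical; A is assumed to be an assembled MATBLOCKMAT with mbs block
   rows and block size bs, and 0 <= k < bs.
*/
#if 0
static PetscErrorCode ExtractEntryPerBlock(Mat A,PetscInt bs,PetscInt mbs,PetscInt k,Mat *S)
{
  IS             is;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* select entry k of every block: indices k, k+bs, k+2*bs, ... */
  ierr = ISCreateStride(PETSC_COMM_SELF,mbs,k,bs,&is);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(A,is,is,MAT_INITIAL_MATRIX,S);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif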
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it */
    fshift += imax[i-1] - ailen[i-1];
    rmax    = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]   = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  A->info.mallocs    += a->reallocs;
  a->reallocs         = 0;
  A->info.nz_unneeded = (double)fshift;
  a->rmax             = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_BlockMat"
PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscBool flg)
{
  PetscFunctionBegin;
  if (opt == MAT_SYMMETRIC && flg) {
    A->ops->sor  = MatSOR_BlockMat_Symmetric;
    A->ops->mult = MatMult_BlockMat_Symmetric;
  } else {
    PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
  }
  PetscFunctionReturn(0);
}


static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
       0,
       0,
       MatMult_BlockMat,
/* 4*/ MatMultAdd_BlockMat,
       MatMultTranspose_BlockMat,
       MatMultTransposeAdd_BlockMat,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatSOR_BlockMat,
       0,
/*15*/ 0,
       0,
       0,
       0,
       0,
/*20*/ 0,
       MatAssemblyEnd_BlockMat,
       MatSetOption_BlockMat,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ MatGetSubMatrix_BlockMat,
       MatDestroy_BlockMat,
       MatView_BlockMat,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_BlockMat,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0,
       0,
/*99*/ 0,
       0,
       0,
       0,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/0,
       0,
       0,
       0,
       0,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       0,
       0,
       0
};

#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).
   By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzeros per block row (same for all rows)
-  nnz - array containing the number of nonzeros in the various block rows
         (possibly different for each row) or PETSC_NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatBlockMatSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[]),(B,bs,nz,nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
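/*
   Illustrative sketch of the preallocation call documented above (guarded out; the function
   name CreatePreallocatedBlockMat and its arguments are hypothetical).  Note that the current
   MatBlockMatSetPreallocation_BlockMat() below requires the nnz array, so a per-block-row
   count is supplied rather than a single nz value.
*/
#if 0
static PetscErrorCode CreatePreallocatedBlockMat(MPI_Comm comm,PetscInt mbs,PetscInt bs,Mat *A)
{
  PetscErrorCode ierr;
  PetscInt       i,*nnz;

  PetscFunctionBegin;
  ierr = PetscMalloc(mbs*sizeof(PetscInt),&nnz);CHKERRQ(ierr);
  for (i=0; i<mbs; i++) nnz[i] = 3;   /* e.g. an upper bound for a block-tridiagonal structure */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,mbs*bs,mbs*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,PETSC_DEFAULT,nnz);CHKERRQ(ierr);
  ierr = PetscFree(nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif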
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
PetscErrorCode MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutGetBlockSize(A->rmap,&bs);CHKERRQ(ierr);

  if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
  if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
  if (nnz) {
    for (i=0; i<A->rmap->n/bs; i++) {
      if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
      if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
    }
  }
  bmat->mbs = A->rmap->n/bs;

  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);

  if (!bmat->imax) {
    ierr = PetscMalloc2(A->rmap->n,PetscInt,&bmat->imax,A->rmap->n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (nnz) {
    nz = 0;
    for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
      bmat->imax[i] = nnz[i];
      nz           += nnz[i];
    }
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
  }

  /* bmat->ilen will count nonzeros in each row so far. */
  for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}

  /* allocate the matrix space */
  ierr = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
  ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap->n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
  bmat->i[0] = 0;
  for (i=1; i<bmat->mbs+1; i++) {
    bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
  }
  bmat->singlemalloc = PETSC_TRUE;
  bmat->free_a       = PETSC_TRUE;
  bmat->free_ij      = PETSC_TRUE;

  bmat->nz            = 0;
  bmat->maxnz         = nz;
  A->info.nz_unneeded = (double)bmat->maxnz;
  ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATBLOCKMAT - A matrix that is defined by a set of Mats that represents a sparse block matrix
                 consisting of (usually) sparse blocks.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_BlockMat"
PetscErrorCode MatCreate_BlockMat(Mat A)
{
  Mat_BlockMat   *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
  A->data = (void*)b;
  ierr    = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);

  A->assembled    = PETSC_TRUE;
  A->preallocated = PETSC_FALSE;
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
                                           "MatBlockMatSetPreallocation_BlockMat",
                                           MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix based on sparse Mat storage

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n - number of columns
.  bs - size of each submatrix
.  nz - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use PETSC_NULL otherwise)

   Output Parameter:
.  A - the matrix

   Level: intermediate

   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
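
/*
   Illustrative usage sketch (guarded out; a hypothetical stand-alone driver, not part of this
   file): assemble a small MATBLOCKMAT with 2x2 blocks on one process and apply it to a vector.
   Scalar (point) row/column indices are passed to MatSetValues(); MatSetValues_BlockMat()
   above maps them onto blocks itself.  The values chosen (mbs, bs, nnz) are arbitrary.
*/
#if 0
#include <petscmat.h>

int main(int argc,char **argv)
{
  Mat            A;
  Vec            x,y;
  PetscInt       mbs = 4,bs = 2,m = mbs*bs,i,nnz[4] = {2,3,3,2};
  PetscScalar    v = 1.0;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatCreateBlockMat(PETSC_COMM_SELF,m,m,bs,PETSC_DEFAULT,nnz,&A);CHKERRQ(ierr);
  for (i=0; i<m; i++) {                    /* a simple point diagonal; lands in the diagonal blocks */
    ierr = MatSetValues(A,1,&i,1,&i,&v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
  ierr = VecDuplicate(x,&y);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = MatMult(A,x,y);CHKERRQ(ierr);     /* dispatches to MatMult_BlockMat() above */

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
#endif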