xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 4fc747eaadbeca11629f314a99edccbc2ed7b3d3) !
1 
2 /*
3    This provides a matrix that consists of Mats
4 */
5 
6 #include <petsc/private/matimpl.h>              /*I "petscmat.h" I*/
7 #include <../src/mat/impls/baij/seq/baij.h>    /* use the common AIJ data-structure */
8 
typedef struct {
  SEQAIJHEADER(Mat);                           /* common CSR bookkeeping (i, j, ilen, imax, nz, ...) with Mat-valued entries */
  SEQBAIJHEADER;                               /* block-size related fields shared with the SeqBAIJ format */
  Mat *diags;                                  /* cached factored diagonal blocks, built lazily by the SOR routines */

  Vec left,right,middle,workb;                 /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
16 
17 extern PetscErrorCode  MatBlockMatSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*);
18 
#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat_Symmetric"
/*
   SOR relaxation for a symmetric MATBLOCKMAT where only the upper triangular
   block part is stored.  Diagonal blocks are Cholesky-factored lazily on
   first use and cached in a->diags; a working copy of the right hand side
   (a->workb) is modified during the forward sweep to account for the
   implicitly stored lower triangular part, following MatSOR_SeqSBAIJ.

   Supported only with omega == 1.0, fshift == 0, no Eisenstat variant, and a
   backward sweep must be combined with a forward sweep (the forward pass
   prepares the adjusted right hand side the backward pass relies on).
*/
static PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscScalar       *x;
  const Mat         *v;
  const PetscScalar *b;
  PetscErrorCode    ierr;
  PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt    *idx;
  IS                row,col;
  MatFactorInfo     info;
  Vec               left = a->left,right = a->right, middle = a->middle;
  Mat               *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP)) {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");
  }

  /* first call only: Cholesky factor each diagonal block and cache the factors */
  if (!a->diags) {
    ierr = PetscMalloc1(mbs,&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      /* NOTE(review): a->diags[i] comes straight from PetscMalloc1 and is never
         created before being passed as the factor matrix here -- presumably a
         MatGetFactor() call is missing; confirm before relying on this path */
      ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(&row);CHKERRQ(ierr);
      ierr = ISDestroy(&col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  ierr = VecGetArrayRead(a->workb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {

      for (i=0; i<mbs; i++) {
        /* skip the first stored entry of the row (presumably the diagonal
           block -- MatMult_BlockMat_Symmetric also treats it diag-first) */
        n   = a->i[i+1] - a->i[i] - 1;
        idx = a->j + a->i[i] + 1;
        v   = a->a + a->i[i] + 1;

        /* left = sum over off-diagonal blocks of block * x_{idx[j]} */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        /* left = b_i - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x_i = D_i^{-1} (b_i - sum), using the cached Cholesky factor */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);

        /* now adjust right hand side, see MatSOR_SeqSBAIJ */
        for (j=0; j<n; j++) {
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {

      for (i=mbs-1; i>=0; i--) {
        /* same row layout as above: first stored entry skipped */
        n   = a->i[i+1] - a->i[i] - 1;
        idx = a->j + a->i[i] + 1;
        v   = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        /* left = (adjusted) b_i - sum; b was updated by the forward sweep */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(a->workb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
126 
#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat"
/*
   SOR relaxation for a (nonsymmetric) MATBLOCKMAT.  Diagonal blocks are
   LU-factored lazily on first use and cached in a->diags; each sweep then
   performs a block Gauss-Seidel update x_i = D_i^{-1} (b_i - sum_{j!=i}
   A_{ij} x_j), forward and/or backward over the block rows.

   Supported only with omega == 1.0, fshift == 0, and no Eisenstat variant.
*/
static PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscScalar       *x;
  const Mat         *v;
  const PetscScalar *b;
  PetscErrorCode    ierr;
  PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt    *idx;
  IS                row,col;
  MatFactorInfo     info;
  Vec               left = a->left,right = a->right;
  Mat               *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");

  /* first call only: LU factor each diagonal block and cache the factors */
  if (!a->diags) {
    ierr = PetscMalloc1(mbs,&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      /* NOTE(review): a->diags[i] comes straight from PetscMalloc1 and is never
         created before being passed as the factor matrix here -- presumably a
         MatGetFactor() call is missing; confirm before relying on this path */
      ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(&row);CHKERRQ(ierr);
      ierr = ISDestroy(&col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {

      for (i=0; i<mbs; i++) {
        n   = a->i[i+1] - a->i[i];
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        /* left = sum over off-diagonal blocks of block * x_{idx[j]} */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {                    /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        /* left = b_i - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x_i = D_i^{-1} (b_i - sum), using the cached LU factor */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {

      for (i=mbs-1; i>=0; i--) {
        n   = a->i[i+1] - a->i[i];
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {                    /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
223 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts/adds scalar values into a MATBLOCKMAT.  Each scalar (row,col) is
   routed to entry (row%bs, col%bs) of the bs-by-bs SeqAIJ submatrix stored
   at block position (row/bs, col/bs); submatrices are created lazily the
   first time their block position is touched.  The block-level search and
   insertion logic mirrors MatSetValues_SeqAIJ.  For symmetric matrices only
   blocks with brow <= bcol (upper triangle) are stored.
*/
static PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj  =a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscBool      roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;                       /* block row containing this scalar row */
    if (row < 0) continue;               /* negative rows are silently ignored */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];                /* block-column indices of this block row */
    ap   = aa + ai[brow];                /* submatrices of this block row */
    rmax = imax[brow];                   /* preallocated slots in this block row */
    nrow = ailen[brow];                  /* slots currently used */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;           /* negative columns are silently ignored */
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue; /* only upper triangle stored */
      ridx = row % bs; cidx = col % bs;  /* position inside the bs-by-bs block */
      if (roworiented) value = v[l + k*n];
      else value = v[k + l*m];

      /* columns usually arrive sorted: reuse the previous search window */
      if (col <= lastcol) low = 0;
      else high = nrow;
      lastcol = col;
      /* binary search narrows [low,high) to <= 7 entries, then linear scan */
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) goto noinsert1; /* block already present */
      }
      if (nonew == 1) goto noinsert1;      /* new nonzeros silently ignored */
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* may reallocate aa/aj (and update rp/ap) if the row is full */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;                 /* freshly opened slot has no submatrix yet */
      rp[i] = bcol;
      a->nz++;
      A->nonzerostate++;
noinsert1:;
      /* create the bs-by-bs submatrix on first touch of this block */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low  = i;                            /* next column of this row is >= this slot */
    }
    ailen[brow] = nrow;
  }
  PetscFunctionReturn(0);
}
299 
300 #undef __FUNCT__
301 #define __FUNCT__ "MatLoad_BlockMat"
302 static PetscErrorCode MatLoad_BlockMat(Mat newmat, PetscViewer viewer)
303 {
304   PetscErrorCode    ierr;
305   Mat               tmpA;
306   PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
307   const PetscInt    *cols;
308   const PetscScalar *values;
309   PetscBool         flg = PETSC_FALSE,notdone;
310   Mat_SeqAIJ        *a;
311   Mat_BlockMat      *amat;
312 
313   PetscFunctionBegin;
314   /* force binary viewer to load .info file if it has not yet done so */
315   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
316   ierr = MatCreate(PETSC_COMM_SELF,&tmpA);CHKERRQ(ierr);
317   ierr = MatSetType(tmpA,MATSEQAIJ);CHKERRQ(ierr);
318   ierr = MatLoad_SeqAIJ(tmpA,viewer);CHKERRQ(ierr);
319 
320   ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
321   ierr = PetscOptionsBegin(PETSC_COMM_SELF,NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
322   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
323   ierr = PetscOptionsBool("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,NULL);CHKERRQ(ierr);
324   ierr = PetscOptionsEnd();CHKERRQ(ierr);
325 
326   /* Determine number of nonzero blocks for each block row */
327   a    = (Mat_SeqAIJ*) tmpA->data;
328   mbs  = m/bs;
329   ierr = PetscMalloc3(mbs,&lens,bs,&ii,bs,&ilens);CHKERRQ(ierr);
330   ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);
331 
332   for (i=0; i<mbs; i++) {
333     for (j=0; j<bs; j++) {
334       ii[j]    = a->j + a->i[i*bs + j];
335       ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
336     }
337 
338     currentcol = -1;
339     while (PETSC_TRUE) {
340       notdone = PETSC_FALSE;
341       nextcol = 1000000000;
342       for (j=0; j<bs; j++) {
343         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
344           ii[j]++;
345           ilens[j]--;
346         }
347         if (ilens[j] > 0) {
348           notdone = PETSC_TRUE;
349           nextcol = PetscMin(nextcol,ii[j][0]/bs);
350         }
351       }
352       if (!notdone) break;
353       if (!flg || (nextcol >= i)) lens[i]++;
354       currentcol = nextcol;
355     }
356   }
357 
358   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) {
359     ierr = MatSetSizes(newmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
360   }
361   ierr = MatBlockMatSetPreallocation(newmat,bs,0,lens);CHKERRQ(ierr);
362   if (flg) {
363     ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
364   }
365   amat = (Mat_BlockMat*)(newmat)->data;
366 
367   /* preallocate the submatrices */
368   ierr = PetscMalloc1(bs,&llens);CHKERRQ(ierr);
369   for (i=0; i<mbs; i++) { /* loops for block rows */
370     for (j=0; j<bs; j++) {
371       ii[j]    = a->j + a->i[i*bs + j];
372       ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
373     }
374 
375     currentcol = 1000000000;
376     for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
377       if (ilens[j] > 0) {
378         currentcol = PetscMin(currentcol,ii[j][0]/bs);
379       }
380     }
381 
382     while (PETSC_TRUE) {  /* loops over blocks in block row */
383       notdone = PETSC_FALSE;
384       nextcol = 1000000000;
385       ierr    = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
386       for (j=0; j<bs; j++) { /* loop over rows in block */
387         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
388           ii[j]++;
389           ilens[j]--;
390           llens[j]++;
391         }
392         if (ilens[j] > 0) {
393           notdone = PETSC_TRUE;
394           nextcol = PetscMin(nextcol,ii[j][0]/bs);
395         }
396       }
397       if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
398       if (!flg || currentcol >= i) {
399         amat->j[cnt] = currentcol;
400         ierr         = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
401       }
402 
403       if (!notdone) break;
404       currentcol = nextcol;
405     }
406     amat->ilen[i] = lens[i];
407   }
408 
409   ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
410   ierr = PetscFree(llens);CHKERRQ(ierr);
411 
412   /* copy over the matrix, one row at a time */
413   for (i=0; i<m; i++) {
414     ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
415     ierr = MatSetValues(newmat,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
416     ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
417   }
418   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
419   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
420   PetscFunctionReturn(0);
421 }
422 
423 #undef __FUNCT__
424 #define __FUNCT__ "MatView_BlockMat"
425 static PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
426 {
427   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
428   PetscErrorCode    ierr;
429   const char        *name;
430   PetscViewerFormat format;
431 
432   PetscFunctionBegin;
433   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
434   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
435   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
436     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
437     if (A->symmetric) {
438       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
439     }
440   }
441   PetscFunctionReturn(0);
442 }
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "MatDestroy_BlockMat"
446 static PetscErrorCode MatDestroy_BlockMat(Mat mat)
447 {
448   PetscErrorCode ierr;
449   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
450   PetscInt       i;
451 
452   PetscFunctionBegin;
453   ierr = VecDestroy(&bmat->right);CHKERRQ(ierr);
454   ierr = VecDestroy(&bmat->left);CHKERRQ(ierr);
455   ierr = VecDestroy(&bmat->middle);CHKERRQ(ierr);
456   ierr = VecDestroy(&bmat->workb);CHKERRQ(ierr);
457   if (bmat->diags) {
458     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
459       ierr = MatDestroy(&bmat->diags[i]);CHKERRQ(ierr);
460     }
461   }
462   if (bmat->a) {
463     for (i=0; i<bmat->nz; i++) {
464       ierr = MatDestroy(&bmat->a[i]);CHKERRQ(ierr);
465     }
466   }
467   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
468   ierr = PetscFree(mat->data);CHKERRQ(ierr);
469   PetscFunctionReturn(0);
470 }
471 
472 #undef __FUNCT__
473 #define __FUNCT__ "MatMult_BlockMat"
474 static PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
475 {
476   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
477   PetscErrorCode ierr;
478   PetscScalar    *xx,*yy;
479   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
480   Mat            *aa;
481 
482   PetscFunctionBegin;
483   /*
484      Standard CSR multiply except each entry is a Mat
485   */
486   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
487 
488   ierr = VecSet(y,0.0);CHKERRQ(ierr);
489   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
490   aj   = bmat->j;
491   aa   = bmat->a;
492   ii   = bmat->i;
493   for (i=0; i<m; i++) {
494     jrow = ii[i];
495     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
496     n    = ii[i+1] - jrow;
497     for (j=0; j<n; j++) {
498       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
499       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
500       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
501       jrow++;
502     }
503     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
504   }
505   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
506   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
507   PetscFunctionReturn(0);
508 }
509 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A*x for a symmetric MATBLOCKMAT that stores only the upper triangular
   block part.  Each stored off-diagonal block A_{ij} (j > i) contributes
   both A_{ij} x_j to y_i (upper part) and A_{ij}^T x_i to y_j (implicit
   lower part); a diagonal block, when present as the first entry of its
   row, contributes only once.
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj   = bmat->j;
  aa   = bmat->a;
  ii   = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    /* left accumulates y_i; middle holds x_i for the transpose contributions */
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
561 
562 #undef __FUNCT__
563 #define __FUNCT__ "MatMultAdd_BlockMat"
564 static PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
565 {
566   PetscFunctionBegin;
567   PetscFunctionReturn(0);
568 }
569 
570 #undef __FUNCT__
571 #define __FUNCT__ "MatMultTranspose_BlockMat"
572 static PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
573 {
574   PetscFunctionBegin;
575   PetscFunctionReturn(0);
576 }
577 
578 #undef __FUNCT__
579 #define __FUNCT__ "MatMultTransposeAdd_BlockMat"
580 static PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
581 {
582   PetscFunctionBegin;
583   PetscFunctionReturn(0);
584 }
585 
586 /*
587      Adds diagonal pointers to sparse matrix structure.
588 */
589 #undef __FUNCT__
590 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
591 static PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
592 {
593   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
594   PetscErrorCode ierr;
595   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
596 
597   PetscFunctionBegin;
598   if (!a->diag) {
599     ierr = PetscMalloc1(mbs,&a->diag);CHKERRQ(ierr);
600   }
601   for (i=0; i<mbs; i++) {
602     a->diag[i] = a->i[i+1];
603     for (j=a->i[i]; j<a->i[i+1]; j++) {
604       if (a->j[j] == i) {
605         a->diag[i] = j;
606         break;
607       }
608     }
609   }
610   PetscFunctionReturn(0);
611 }
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
615 static PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
616 {
617   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
618   Mat_SeqAIJ     *c;
619   PetscErrorCode ierr;
620   PetscInt       i,k,first,step,lensi,nrows,ncols;
621   PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
622   PetscScalar    *a_new;
623   Mat            C,*aa = a->a;
624   PetscBool      stride,equal;
625 
626   PetscFunctionBegin;
627   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
628   if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for idential column and row indices");
629   ierr = PetscObjectTypeCompare((PetscObject)iscol,ISSTRIDE,&stride);CHKERRQ(ierr);
630   if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
631   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
632   if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");
633 
634   ierr  = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
635   ncols = nrows;
636 
637   /* create submatrix */
638   if (scall == MAT_REUSE_MATRIX) {
639     PetscInt n_cols,n_rows;
640     C    = *B;
641     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
642     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
643     ierr = MatZeroEntries(C);CHKERRQ(ierr);
644   } else {
645     ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr);
646     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
647     if (A->symmetric) {
648       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
649     } else {
650       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
651     }
652     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
653     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
654   }
655   c = (Mat_SeqAIJ*)C->data;
656 
657   /* loop over rows inserting into submatrix */
658   a_new = c->a;
659   j_new = c->j;
660   i_new = c->i;
661 
662   for (i=0; i<nrows; i++) {
663     lensi = ailen[i];
664     for (k=0; k<lensi; k++) {
665       *j_new++ = *aj++;
666       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
667     }
668     i_new[i+1] = i_new[i] + lensi;
669     c->ilen[i] = lensi;
670   }
671 
672   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
673   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
674   *B   = C;
675   PetscFunctionReturn(0);
676 }
677 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly for MATBLOCKMAT: compresses out the unused preallocated
   slots in each block row (shifting j and a in place), resets ilen/imax to
   the final row lengths, assembles every stored submatrix, and marks the
   diagonal blocks.  Mirrors MatAssemblyEnd_SeqAIJ at the block level.
*/
static PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m      = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa    = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax    = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];       /* new (compressed) row start */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]   = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* assemble each stored submatrix so it is ready for use */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);

  A->info.mallocs    += a->reallocs;
  a->reallocs         = 0;
  A->info.nz_unneeded = (double)fshift;
  a->rmax             = rmax;
  ierr                = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
734 
735 #undef __FUNCT__
736 #define __FUNCT__ "MatSetOption_BlockMat"
737 static PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscBool flg)
738 {
739   PetscFunctionBegin;
740   if (opt == MAT_SYMMETRIC && flg) {
741     A->ops->sor  = MatSOR_BlockMat_Symmetric;
742     A->ops->mult = MatMult_BlockMat_Symmetric;
743   } else {
744     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
745   }
746   PetscFunctionReturn(0);
747 }
748 
749 
750 static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
751                                        0,
752                                        0,
753                                        MatMult_BlockMat,
754                                /*  4*/ MatMultAdd_BlockMat,
755                                        MatMultTranspose_BlockMat,
756                                        MatMultTransposeAdd_BlockMat,
757                                        0,
758                                        0,
759                                        0,
760                                /* 10*/ 0,
761                                        0,
762                                        0,
763                                        MatSOR_BlockMat,
764                                        0,
765                                /* 15*/ 0,
766                                        0,
767                                        0,
768                                        0,
769                                        0,
770                                /* 20*/ 0,
771                                        MatAssemblyEnd_BlockMat,
772                                        MatSetOption_BlockMat,
773                                        0,
774                                /* 24*/ 0,
775                                        0,
776                                        0,
777                                        0,
778                                        0,
779                                /* 29*/ 0,
780                                        0,
781                                        0,
782                                        0,
783                                        0,
784                                /* 34*/ 0,
785                                        0,
786                                        0,
787                                        0,
788                                        0,
789                                /* 39*/ 0,
790                                        0,
791                                        0,
792                                        0,
793                                        0,
794                                /* 44*/ 0,
795                                        0,
796                                        MatShift_Basic,
797                                        0,
798                                        0,
799                                /* 49*/ 0,
800                                        0,
801                                        0,
802                                        0,
803                                        0,
804                                /* 54*/ 0,
805                                        0,
806                                        0,
807                                        0,
808                                        0,
809                                /* 59*/ MatGetSubMatrix_BlockMat,
810                                        MatDestroy_BlockMat,
811                                        MatView_BlockMat,
812                                        0,
813                                        0,
814                                /* 64*/ 0,
815                                        0,
816                                        0,
817                                        0,
818                                        0,
819                                /* 69*/ 0,
820                                        0,
821                                        0,
822                                        0,
823                                        0,
824                                /* 74*/ 0,
825                                        0,
826                                        0,
827                                        0,
828                                        0,
829                                /* 79*/ 0,
830                                        0,
831                                        0,
832                                        0,
833                                        MatLoad_BlockMat,
834                                /* 84*/ 0,
835                                        0,
836                                        0,
837                                        0,
838                                        0,
839                                /* 89*/ 0,
840                                        0,
841                                        0,
842                                        0,
843                                        0,
844                                /* 94*/ 0,
845                                        0,
846                                        0,
847                                        0,
848                                        0,
849                                /* 99*/ 0,
850                                        0,
851                                        0,
852                                        0,
853                                        0,
854                                /*104*/ 0,
855                                        0,
856                                        0,
857                                        0,
858                                        0,
859                                /*109*/ 0,
860                                        0,
861                                        0,
862                                        0,
863                                        0,
864                                /*114*/ 0,
865                                        0,
866                                        0,
867                                        0,
868                                        0,
869                                /*119*/ 0,
870                                        0,
871                                        0,
872                                        0,
873                                        0,
874                                /*124*/ 0,
875                                        0,
876                                        0,
877                                        0,
878                                        0,
879                                /*129*/ 0,
880                                        0,
881                                        0,
882                                        0,
883                                        0,
884                                /*134*/ 0,
885                                        0,
886                                        0,
887                                        0,
888                                        0,
889                                /*139*/ 0,
890                                        0,
891                                        0
892 };
893 
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzeros per block row (same for all rows)
-  nnz - array containing the number of nonzeros in the various block rows
         (possibly different for each row) or NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   NOTE(review): the current MATBLOCKMAT implementation
   (MatBlockMatSetPreallocation_BlockMat) raises PETSC_ERR_SUP when nnz is
   NULL, so in practice nnz must be provided -- confirm before relying on
   the nz-only path described above.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode  MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Dispatch to the type-specific routine if the type provides one; no-op otherwise */
  ierr = PetscTryMethod(B,"MatBlockMatSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[]),(B,bs,nz,nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
932 
933 #undef __FUNCT__
934 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
935 static PetscErrorCode  MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
936 {
937   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
938   PetscErrorCode ierr;
939   PetscInt       i;
940 
941   PetscFunctionBegin;
942   ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr);
943   ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr);
944   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
945   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
946   ierr = PetscLayoutGetBlockSize(A->rmap,&bs);CHKERRQ(ierr);
947 
948   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
949   if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
950   if (nnz) {
951     for (i=0; i<A->rmap->n/bs; i++) {
952       if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
953       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
954     }
955   }
956   bmat->mbs = A->rmap->n/bs;
957 
958   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->right);CHKERRQ(ierr);
959   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->middle);CHKERRQ(ierr);
960   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
961 
962   if (!bmat->imax) {
963     ierr = PetscMalloc2(A->rmap->n,&bmat->imax,A->rmap->n,&bmat->ilen);CHKERRQ(ierr);
964     ierr = PetscLogObjectMemory((PetscObject)A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
965   }
966   if (nnz) {
967     nz = 0;
968     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
969       bmat->imax[i] = nnz[i];
970       nz           += nnz[i];
971     }
972   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
973 
974   /* bmat->ilen will count nonzeros in each row so far. */
975   for (i=0; i<bmat->mbs; i++) bmat->ilen[i] = 0;
976 
977   /* allocate the matrix space */
978   ierr       = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
979   ierr       = PetscMalloc3(nz,&bmat->a,nz,&bmat->j,A->rmap->n+1,&bmat->i);CHKERRQ(ierr);
980   ierr       = PetscLogObjectMemory((PetscObject)A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
981   bmat->i[0] = 0;
982   for (i=1; i<bmat->mbs+1; i++) {
983     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
984   }
985   bmat->singlemalloc = PETSC_TRUE;
986   bmat->free_a       = PETSC_TRUE;
987   bmat->free_ij      = PETSC_TRUE;
988 
989   bmat->nz            = 0;
990   bmat->maxnz         = nz;
991   A->info.nz_unneeded = (double)bmat->maxnz;
992   ierr                = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
993   PetscFunctionReturn(0);
994 }
995 
/*MC
   MATBLOCKMAT - A matrix defined by a set of Mat objects that together represent a sparse
                 block matrix whose individual blocks are themselves (usually) sparse matrices.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/
1005 
1006 #undef __FUNCT__
1007 #define __FUNCT__ "MatCreate_BlockMat"
1008 PETSC_EXTERN PetscErrorCode MatCreate_BlockMat(Mat A)
1009 {
1010   Mat_BlockMat   *b;
1011   PetscErrorCode ierr;
1012 
1013   PetscFunctionBegin;
1014   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1015   A->data = (void*)b;
1016   ierr    = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1017 
1018   A->assembled    = PETSC_TRUE;
1019   A->preallocated = PETSC_FALSE;
1020   ierr            = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1021 
1022   ierr = PetscObjectComposeFunction((PetscObject)A,"MatBlockMatSetPreallocation_C",MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1023   PetscFunctionReturn(0);
1024 }
1025 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix in which each block contains a uniform-size sequential Mat object

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use NULL otherwise)

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: Matrices of this type are nominally-sparse matrices in which each "entry" is a Mat object.  Each Mat must
   have the same size and be sequential.  The local and global sizes must be compatible with this decomposition.

   NOTE(review): the current BlockMat preallocation routine errors when nnz is
   NULL, so nnz must be provided in practice -- verify before using the
   nz-only form.

   For matrices containing parallel submatrices and variable block sizes, see MATNEST.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT, MatCreateNest()
@*/
PetscErrorCode  MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1067