xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 2c9ec6dfe874b911fa49ef7e759d29a8430d6aff)
1 
2 /*
3    This provides a matrix that consists of Mats
4 */
5 
6 #include <petsc-private/matimpl.h>              /*I "petscmat.h" I*/
7 #include <../src/mat/impls/baij/seq/baij.h>    /* use the common AIJ data-structure */
8 
/*
   Data for a MATBLOCKMAT: a sequential CSR matrix whose "scalar" entries are
   themselves (bs x bs) Mat objects.
*/
typedef struct {
  SEQAIJHEADER(Mat);   /* common CSR bookkeeping (i, j, ilen, imax, nz, ...) with a[] holding Mat entries */
  SEQBAIJHEADER;       /* block-size bookkeeping shared with SeqBAIJ (mbs, bs2, ...) */
  Mat *diags;          /* factored diagonal blocks; built lazily by the SOR routines */

  Vec left,right,middle,workb;                 /* dummy vectors to perform local parts of product */
} Mat_BlockMat;

/* prototype for the preallocation routine defined elsewhere in this file's module */
extern PetscErrorCode  MatBlockMatSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatSOR_BlockMat_Symmetric"
21 PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
22 {
23   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
24   PetscScalar       *x;
25   const Mat         *v;
26   const PetscScalar *b;
27   PetscErrorCode    ierr;
28   PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
29   const PetscInt    *idx;
30   IS                row,col;
31   MatFactorInfo     info;
32   Vec               left = a->left,right = a->right, middle = a->middle;
33   Mat               *diag;
34 
35   PetscFunctionBegin;
36   its = its*lits;
37   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
38   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
39   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
40   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
41   if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP)) {
42     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");
43   }
44 
45   if (!a->diags) {
46     ierr = PetscMalloc1(mbs,&a->diags);CHKERRQ(ierr);
47     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
48     for (i=0; i<mbs; i++) {
49       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
50       ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
51       ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
52       ierr = ISDestroy(&row);CHKERRQ(ierr);
53       ierr = ISDestroy(&col);CHKERRQ(ierr);
54     }
55     ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
56   }
57   diag = a->diags;
58 
59   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
60   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
61   /* copy right hand side because it must be modified during iteration */
62   ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
63   ierr = VecGetArrayRead(a->workb,&b);CHKERRQ(ierr);
64 
65   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
66   while (its--) {
67     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {
68 
69       for (i=0; i<mbs; i++) {
70         n   = a->i[i+1] - a->i[i] - 1;
71         idx = a->j + a->i[i] + 1;
72         v   = a->a + a->i[i] + 1;
73 
74         ierr = VecSet(left,0.0);CHKERRQ(ierr);
75         for (j=0; j<n; j++) {
76           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
77           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
78           ierr = VecResetArray(right);CHKERRQ(ierr);
79         }
80         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
81         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
82         ierr = VecResetArray(right);CHKERRQ(ierr);
83 
84         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
85         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
86 
87         /* now adjust right hand side, see MatSOR_SeqSBAIJ */
88         for (j=0; j<n; j++) {
89           ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
90           ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
91           ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
92           ierr = VecResetArray(middle);CHKERRQ(ierr);
93         }
94         ierr = VecResetArray(right);CHKERRQ(ierr);
95 
96       }
97     }
98     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {
99 
100       for (i=mbs-1; i>=0; i--) {
101         n   = a->i[i+1] - a->i[i] - 1;
102         idx = a->j + a->i[i] + 1;
103         v   = a->a + a->i[i] + 1;
104 
105         ierr = VecSet(left,0.0);CHKERRQ(ierr);
106         for (j=0; j<n; j++) {
107           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
108           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
109           ierr = VecResetArray(right);CHKERRQ(ierr);
110         }
111         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
112         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
113         ierr = VecResetArray(right);CHKERRQ(ierr);
114 
115         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
116         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
117         ierr = VecResetArray(right);CHKERRQ(ierr);
118 
119       }
120     }
121   }
122   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
123   ierr = VecRestoreArrayRead(a->workb,&b);CHKERRQ(ierr);
124   PetscFunctionReturn(0);
125 }
126 
127 #undef __FUNCT__
128 #define __FUNCT__ "MatSOR_BlockMat"
129 PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
130 {
131   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
132   PetscScalar       *x;
133   const Mat         *v;
134   const PetscScalar *b;
135   PetscErrorCode    ierr;
136   PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
137   const PetscInt    *idx;
138   IS                row,col;
139   MatFactorInfo     info;
140   Vec               left = a->left,right = a->right;
141   Mat               *diag;
142 
143   PetscFunctionBegin;
144   its = its*lits;
145   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
146   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
147   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
148   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
149 
150   if (!a->diags) {
151     ierr = PetscMalloc1(mbs,&a->diags);CHKERRQ(ierr);
152     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
153     for (i=0; i<mbs; i++) {
154       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
155       ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
156       ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
157       ierr = ISDestroy(&row);CHKERRQ(ierr);
158       ierr = ISDestroy(&col);CHKERRQ(ierr);
159     }
160   }
161   diag = a->diags;
162 
163   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
164   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
165   ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
166 
167   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
168   while (its--) {
169     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {
170 
171       for (i=0; i<mbs; i++) {
172         n   = a->i[i+1] - a->i[i];
173         idx = a->j + a->i[i];
174         v   = a->a + a->i[i];
175 
176         ierr = VecSet(left,0.0);CHKERRQ(ierr);
177         for (j=0; j<n; j++) {
178           if (idx[j] != i) {
179             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
180             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
181             ierr = VecResetArray(right);CHKERRQ(ierr);
182           }
183         }
184         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
185         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
186         ierr = VecResetArray(right);CHKERRQ(ierr);
187 
188         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
189         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
190         ierr = VecResetArray(right);CHKERRQ(ierr);
191       }
192     }
193     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {
194 
195       for (i=mbs-1; i>=0; i--) {
196         n   = a->i[i+1] - a->i[i];
197         idx = a->j + a->i[i];
198         v   = a->a + a->i[i];
199 
200         ierr = VecSet(left,0.0);CHKERRQ(ierr);
201         for (j=0; j<n; j++) {
202           if (idx[j] != i) {
203             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
204             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
205             ierr = VecResetArray(right);CHKERRQ(ierr);
206           }
207         }
208         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
209         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
210         ierr = VecResetArray(right);CHKERRQ(ierr);
211 
212         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
213         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
214         ierr = VecResetArray(right);CHKERRQ(ierr);
215 
216       }
217     }
218   }
219   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
220   ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
221   PetscFunctionReturn(0);
222 }
223 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts/adds scalar values into the BlockMat: each (im[k],in[l]) scalar is
   routed to position (row%bs, col%bs) of the (row/bs, col/bs) block, which is
   a SeqAIJ matrix created on demand.  The block-column list of each block row
   is kept sorted; a binary search (for runs longer than 7) followed by a
   linear scan locates the insertion point, mirroring MatSetValues_SeqAIJ.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj  =a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscBool      roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;                      /* block row containing this scalar row */
    if (row < 0) continue;              /* negative rows are silently ignored */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];               /* block-column indices of block row brow */
    ap   = aa + ai[brow];               /* Mat entries of block row brow */
    rmax = imax[brow];                  /* preallocated slots in this block row */
    nrow = ailen[brow];                 /* slots currently in use */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;          /* negative columns are silently ignored */
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      /* symmetric storage keeps only the upper triangular block part */
      if (A->symmetric && brow > bcol) continue;
      ridx = row % bs; cidx = col % bs; /* position inside the (bs x bs) block */
      if (roworiented) value = v[l + k*n];
      else value = v[k + l*m];

      /* reuse the previous search window when columns arrive in order */
      if (col <= lastcol) low = 0;
      else high = nrow;
      lastcol = col;
      while (high-low > 7) {            /* binary search until the window is small */
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;        /* i is the insertion point */
        if (rp[i] == bcol) goto noinsert1; /* block already present */
      }
      if (nonew == 1) goto noinsert1;   /* MAT_NEW_NONZERO_LOCATIONS == FALSE: drop it */
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* grow the row storage if the preallocation is exhausted */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;              /* freshly opened slot has no Mat yet */
      rp[i] = bcol;
      a->nz++;
      A->nonzerostate++;
noinsert1:;
      /* create the block lazily the first time a value lands in it */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low  = i;                         /* next column in this row cannot be earlier */
    }
    ailen[brow] = nrow;
  }
  PetscFunctionReturn(0);
}
299 
300 #undef __FUNCT__
301 #define __FUNCT__ "MatLoad_BlockMat"
302 PetscErrorCode MatLoad_BlockMat(Mat newmat, PetscViewer viewer)
303 {
304   PetscErrorCode    ierr;
305   Mat               tmpA;
306   PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
307   const PetscInt    *cols;
308   const PetscScalar *values;
309   PetscBool         flg = PETSC_FALSE,notdone;
310   Mat_SeqAIJ        *a;
311   Mat_BlockMat      *amat;
312 
313   PetscFunctionBegin;
314   ierr = MatCreate(PETSC_COMM_SELF,&tmpA);CHKERRQ(ierr);
315   ierr = MatSetType(tmpA,MATSEQAIJ);CHKERRQ(ierr);
316   ierr = MatLoad_SeqAIJ(tmpA,viewer);CHKERRQ(ierr);
317 
318   ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
319   ierr = PetscOptionsBegin(PETSC_COMM_SELF,NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
320   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
321   ierr = PetscOptionsBool("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,NULL);CHKERRQ(ierr);
322   ierr = PetscOptionsEnd();CHKERRQ(ierr);
323 
324   /* Determine number of nonzero blocks for each block row */
325   a    = (Mat_SeqAIJ*) tmpA->data;
326   mbs  = m/bs;
327   ierr = PetscMalloc3(mbs,&lens,bs,&ii,bs,&ilens);CHKERRQ(ierr);
328   ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);
329 
330   for (i=0; i<mbs; i++) {
331     for (j=0; j<bs; j++) {
332       ii[j]    = a->j + a->i[i*bs + j];
333       ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
334     }
335 
336     currentcol = -1;
337     notdone    = PETSC_TRUE;
338     while (PETSC_TRUE) {
339       notdone = PETSC_FALSE;
340       nextcol = 1000000000;
341       for (j=0; j<bs; j++) {
342         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
343           ii[j]++;
344           ilens[j]--;
345         }
346         if (ilens[j] > 0) {
347           notdone = PETSC_TRUE;
348           nextcol = PetscMin(nextcol,ii[j][0]/bs);
349         }
350       }
351       if (!notdone) break;
352       if (!flg || (nextcol >= i)) lens[i]++;
353       currentcol = nextcol;
354     }
355   }
356 
357   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) {
358     ierr = MatSetSizes(newmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
359   }
360   ierr = MatBlockMatSetPreallocation(newmat,bs,0,lens);CHKERRQ(ierr);
361   if (flg) {
362     ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
363   }
364   amat = (Mat_BlockMat*)(newmat)->data;
365 
366   /* preallocate the submatrices */
367   ierr = PetscMalloc1(bs,&llens);CHKERRQ(ierr);
368   for (i=0; i<mbs; i++) { /* loops for block rows */
369     for (j=0; j<bs; j++) {
370       ii[j]    = a->j + a->i[i*bs + j];
371       ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
372     }
373 
374     currentcol = 1000000000;
375     for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
376       if (ilens[j] > 0) {
377         currentcol = PetscMin(currentcol,ii[j][0]/bs);
378       }
379     }
380 
381     notdone = PETSC_TRUE;
382     while (PETSC_TRUE) {  /* loops over blocks in block row */
383 
384       notdone = PETSC_FALSE;
385       nextcol = 1000000000;
386       ierr    = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
387       for (j=0; j<bs; j++) { /* loop over rows in block */
388         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
389           ii[j]++;
390           ilens[j]--;
391           llens[j]++;
392         }
393         if (ilens[j] > 0) {
394           notdone = PETSC_TRUE;
395           nextcol = PetscMin(nextcol,ii[j][0]/bs);
396         }
397       }
398       if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
399       if (!flg || currentcol >= i) {
400         amat->j[cnt] = currentcol;
401         ierr         = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
402       }
403 
404       if (!notdone) break;
405       currentcol = nextcol;
406     }
407     amat->ilen[i] = lens[i];
408   }
409 
410   ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
411   ierr = PetscFree(llens);CHKERRQ(ierr);
412 
413   /* copy over the matrix, one row at a time */
414   for (i=0; i<m; i++) {
415     ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
416     ierr = MatSetValues(newmat,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
417     ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
418   }
419   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
420   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
421   PetscFunctionReturn(0);
422 }
423 
424 #undef __FUNCT__
425 #define __FUNCT__ "MatView_BlockMat"
426 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
427 {
428   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
429   PetscErrorCode    ierr;
430   const char        *name;
431   PetscViewerFormat format;
432 
433   PetscFunctionBegin;
434   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
435   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
436   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
437     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
438     if (A->symmetric) {
439       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
440     }
441   }
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "MatDestroy_BlockMat"
447 PetscErrorCode MatDestroy_BlockMat(Mat mat)
448 {
449   PetscErrorCode ierr;
450   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
451   PetscInt       i;
452 
453   PetscFunctionBegin;
454   ierr = VecDestroy(&bmat->right);CHKERRQ(ierr);
455   ierr = VecDestroy(&bmat->left);CHKERRQ(ierr);
456   ierr = VecDestroy(&bmat->middle);CHKERRQ(ierr);
457   ierr = VecDestroy(&bmat->workb);CHKERRQ(ierr);
458   if (bmat->diags) {
459     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
460       ierr = MatDestroy(&bmat->diags[i]);CHKERRQ(ierr);
461     }
462   }
463   if (bmat->a) {
464     for (i=0; i<bmat->nz; i++) {
465       ierr = MatDestroy(&bmat->a[i]);CHKERRQ(ierr);
466     }
467   }
468   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
469   ierr = PetscFree(mat->data);CHKERRQ(ierr);
470   PetscFunctionReturn(0);
471 }
472 
473 #undef __FUNCT__
474 #define __FUNCT__ "MatMult_BlockMat"
475 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
476 {
477   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
478   PetscErrorCode ierr;
479   PetscScalar    *xx,*yy;
480   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
481   Mat            *aa;
482 
483   PetscFunctionBegin;
484   /*
485      Standard CSR multiply except each entry is a Mat
486   */
487   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
488 
489   ierr = VecSet(y,0.0);CHKERRQ(ierr);
490   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
491   aj   = bmat->j;
492   aa   = bmat->a;
493   ii   = bmat->i;
494   for (i=0; i<m; i++) {
495     jrow = ii[i];
496     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
497     n    = ii[i+1] - jrow;
498     for (j=0; j<n; j++) {
499       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
500       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
501       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
502       jrow++;
503     }
504     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
505   }
506   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
507   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A*x when only the upper triangular block part of the symmetric matrix
   is stored: each stored off-diagonal block B contributes B*x to its row of y
   (upper part) and B^T*x to its column of y (lower part) in a single pass.
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj   = bmat->j;
  aa   = bmat->a;
  ii   = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    /* left covers y[i] (row accumulation), middle covers x[i] (for transpose products) */
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      /* diagonal block contributes only once */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
562 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/* Stub: does nothing, leaving z untouched.  NOTE(review): a silent no-op looks
   wrong for MatMultAdd (z should become A*x + y) -- confirm whether this
   should be implemented or raise PETSC_ERR_SUP instead. */
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
570 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/* Stub: does nothing, leaving y untouched.  NOTE(review): a silent no-op looks
   wrong for MatMultTranspose -- confirm whether this should be implemented or
   raise PETSC_ERR_SUP instead. */
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
578 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/* Stub: does nothing, leaving z untouched.  NOTE(review): a silent no-op looks
   wrong for MatMultTransposeAdd -- confirm whether this should be implemented
   or raise PETSC_ERR_SUP instead. */
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
586 
587 /*
588      Adds diagonal pointers to sparse matrix structure.
589 */
590 #undef __FUNCT__
591 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
592 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
593 {
594   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
595   PetscErrorCode ierr;
596   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
597 
598   PetscFunctionBegin;
599   if (!a->diag) {
600     ierr = PetscMalloc1(mbs,&a->diag);CHKERRQ(ierr);
601   }
602   for (i=0; i<mbs; i++) {
603     a->diag[i] = a->i[i+1];
604     for (j=a->i[i]; j<a->i[i+1]; j++) {
605       if (a->j[j] == i) {
606         a->diag[i] = j;
607         break;
608       }
609     }
610   }
611   PetscFunctionReturn(0);
612 }
613 
614 #undef __FUNCT__
615 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
616 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
617 {
618   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
619   Mat_SeqAIJ     *c;
620   PetscErrorCode ierr;
621   PetscInt       i,k,first,step,lensi,nrows,ncols;
622   PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
623   PetscScalar    *a_new;
624   Mat            C,*aa = a->a;
625   PetscBool      stride,equal;
626 
627   PetscFunctionBegin;
628   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
629   if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for idential column and row indices");
630   ierr = PetscObjectTypeCompare((PetscObject)iscol,ISSTRIDE,&stride);CHKERRQ(ierr);
631   if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
632   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
633   if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");
634 
635   ierr  = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
636   ncols = nrows;
637 
638   /* create submatrix */
639   if (scall == MAT_REUSE_MATRIX) {
640     PetscInt n_cols,n_rows;
641     C    = *B;
642     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
643     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
644     ierr = MatZeroEntries(C);CHKERRQ(ierr);
645   } else {
646     ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr);
647     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
648     if (A->symmetric) {
649       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
650     } else {
651       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
652     }
653     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
654     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
655   }
656   c = (Mat_SeqAIJ*)C->data;
657 
658   /* loop over rows inserting into submatrix */
659   a_new = c->a;
660   j_new = c->j;
661   i_new = c->i;
662 
663   for (i=0; i<nrows; i++) {
664     lensi = ailen[i];
665     for (k=0; k<lensi; k++) {
666       *j_new++ = *aj++;
667       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
668     }
669     i_new[i+1] = i_new[i] + lensi;
670     c->ilen[i] = lensi;
671   }
672 
673   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
674   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
675   *B   = C;
676   PetscFunctionReturn(0);
677 }
678 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly: compresses out the unused preallocated slots of every block
   row (shifting a->j and a->a left in place), rebuilds the row pointers a->i,
   assembles each stored block matrix, records diagnostics, and marks the
   diagonal blocks.  Flush assembly is a no-op.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m      = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa    = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax    = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1]; /* row pointer now reflects the compressed layout */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]   = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* finish assembly of every block matrix */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);

  A->info.mallocs    += a->reallocs;
  a->reallocs         = 0;
  A->info.nz_unneeded = (double)fshift;
  a->rmax             = rmax;
  ierr                = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
735 
736 #undef __FUNCT__
737 #define __FUNCT__ "MatSetOption_BlockMat"
738 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscBool flg)
739 {
740   PetscFunctionBegin;
741   if (opt == MAT_SYMMETRIC && flg) {
742     A->ops->sor  = MatSOR_BlockMat_Symmetric;
743     A->ops->mult = MatMult_BlockMat_Symmetric;
744   } else {
745     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
746   }
747   PetscFunctionReturn(0);
748 }
749 
750 
751 static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
752                                        0,
753                                        0,
754                                        MatMult_BlockMat,
755                                /*  4*/ MatMultAdd_BlockMat,
756                                        MatMultTranspose_BlockMat,
757                                        MatMultTransposeAdd_BlockMat,
758                                        0,
759                                        0,
760                                        0,
761                                /* 10*/ 0,
762                                        0,
763                                        0,
764                                        MatSOR_BlockMat,
765                                        0,
766                                /* 15*/ 0,
767                                        0,
768                                        0,
769                                        0,
770                                        0,
771                                /* 20*/ 0,
772                                        MatAssemblyEnd_BlockMat,
773                                        MatSetOption_BlockMat,
774                                        0,
775                                /* 24*/ 0,
776                                        0,
777                                        0,
778                                        0,
779                                        0,
780                                /* 29*/ 0,
781                                        0,
782                                        0,
783                                        0,
784                                        0,
785                                /* 34*/ 0,
786                                        0,
787                                        0,
788                                        0,
789                                        0,
790                                /* 39*/ 0,
791                                        0,
792                                        0,
793                                        0,
794                                        0,
795                                /* 44*/ 0,
796                                        0,
797                                        0,
798                                        0,
799                                        0,
800                                /* 49*/ 0,
801                                        0,
802                                        0,
803                                        0,
804                                        0,
805                                /* 54*/ 0,
806                                        0,
807                                        0,
808                                        0,
809                                        0,
810                                /* 59*/ MatGetSubMatrix_BlockMat,
811                                        MatDestroy_BlockMat,
812                                        MatView_BlockMat,
813                                        0,
814                                        0,
815                                /* 64*/ 0,
816                                        0,
817                                        0,
818                                        0,
819                                        0,
820                                /* 69*/ 0,
821                                        0,
822                                        0,
823                                        0,
824                                        0,
825                                /* 74*/ 0,
826                                        0,
827                                        0,
828                                        0,
829                                        0,
830                                /* 79*/ 0,
831                                        0,
832                                        0,
833                                        0,
834                                        MatLoad_BlockMat,
835                                /* 84*/ 0,
836                                        0,
837                                        0,
838                                        0,
839                                        0,
840                                /* 89*/ 0,
841                                        0,
842                                        0,
843                                        0,
844                                        0,
845                                /* 94*/ 0,
846                                        0,
847                                        0,
848                                        0,
849                                        0,
850                                /* 99*/ 0,
851                                        0,
852                                        0,
853                                        0,
854                                        0,
855                                /*104*/ 0,
856                                        0,
857                                        0,
858                                        0,
859                                        0,
860                                /*109*/ 0,
861                                        0,
862                                        0,
863                                        0,
864                                        0,
865                                /*114*/ 0,
866                                        0,
867                                        0,
868                                        0,
869                                        0,
870                                /*119*/ 0,
871                                        0,
872                                        0,
873                                        0,
874                                        0,
875                                /*124*/ 0,
876                                        0,
877                                        0,
878                                        0,
879                                        0,
880                                /*129*/ 0,
881                                        0,
882                                        0,
883                                        0,
884                                        0,
885                                /*134*/ 0,
886                                        0,
887                                        0,
888                                        0,
889                                        0,
890                                /*139*/ 0,
891                                        0,
892                                        0
893 };
894 
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzero blocks per block row (same for all block rows)
-  nnz - array containing the number of nonzero blocks in the various block rows
         (possibly different for each block row) or NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode  MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Dispatch to the type-specific routine; PetscTryMethod() silently does
     nothing if B is not of a type that composes this method (i.e. not MATBLOCKMAT) */
  ierr = PetscTryMethod(B,"MatBlockMatSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[]),(B,bs,nz,nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
933 
934 #undef __FUNCT__
935 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
936 PetscErrorCode  MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
937 {
938   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
939   PetscErrorCode ierr;
940   PetscInt       i;
941 
942   PetscFunctionBegin;
943   ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr);
944   ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr);
945   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
946   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
947   ierr = PetscLayoutGetBlockSize(A->rmap,&bs);CHKERRQ(ierr);
948 
949   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
950   if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
951   if (nnz) {
952     for (i=0; i<A->rmap->n/bs; i++) {
953       if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
954       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
955     }
956   }
957   bmat->mbs = A->rmap->n/bs;
958 
959   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->right);CHKERRQ(ierr);
960   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->middle);CHKERRQ(ierr);
961   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
962 
963   if (!bmat->imax) {
964     ierr = PetscMalloc2(A->rmap->n,&bmat->imax,A->rmap->n,&bmat->ilen);CHKERRQ(ierr);
965     ierr = PetscLogObjectMemory((PetscObject)A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
966   }
967   if (nnz) {
968     nz = 0;
969     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
970       bmat->imax[i] = nnz[i];
971       nz           += nnz[i];
972     }
973   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
974 
975   /* bmat->ilen will count nonzeros in each row so far. */
976   for (i=0; i<bmat->mbs; i++) bmat->ilen[i] = 0;
977 
978   /* allocate the matrix space */
979   ierr       = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
980   ierr       = PetscMalloc3(nz,&bmat->a,nz,&bmat->j,A->rmap->n+1,&bmat->i);CHKERRQ(ierr);
981   ierr       = PetscLogObjectMemory((PetscObject)A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
982   bmat->i[0] = 0;
983   for (i=1; i<bmat->mbs+1; i++) {
984     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
985   }
986   bmat->singlemalloc = PETSC_TRUE;
987   bmat->free_a       = PETSC_TRUE;
988   bmat->free_ij      = PETSC_TRUE;
989 
990   bmat->nz            = 0;
991   bmat->maxnz         = nz;
992   A->info.nz_unneeded = (double)bmat->maxnz;
993   ierr                = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
994   PetscFunctionReturn(0);
995 }
996 
/*MC
   MATBLOCKMAT - A matrix format defined by a set of Mat objects, representing
                 a sparse block matrix whose blocks are themselves (usually) sparse.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/
1006 
1007 #undef __FUNCT__
1008 #define __FUNCT__ "MatCreate_BlockMat"
1009 PETSC_EXTERN PetscErrorCode MatCreate_BlockMat(Mat A)
1010 {
1011   Mat_BlockMat   *b;
1012   PetscErrorCode ierr;
1013 
1014   PetscFunctionBegin;
1015   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1016   A->data = (void*)b;
1017   ierr    = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1018 
1019   A->assembled    = PETSC_TRUE;
1020   A->preallocated = PETSC_FALSE;
1021   ierr            = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1022 
1023   ierr = PetscObjectComposeFunction((PetscObject)A,"MatBlockMatSetPreallocation_C",MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1024   PetscFunctionReturn(0);
1025 }
1026 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix in the MATBLOCKMAT format, a sparse
   block matrix whose entries are Mat objects.

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzero blocks per block row if known (use NULL otherwise)

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode  MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Standard PETSc create/set-sizes/set-type sequence, then preallocate */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1073 
1074 
1075 
1076