xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 316061d122dfc42e71d39f282d7e3fc82cd1e410)
1 #define PETSCMAT_DLL
2 
3 /*
4    This provides a matrix that consists of Mats
5 */
6 
7 #include "src/mat/matimpl.h"              /*I "petscmat.h" I*/
8 #include "src/mat/impls/baij/seq/baij.h"    /* use the common AIJ data-structure */
9 #include "petscksp.h"
10 
11 #define CHUNKSIZE   15
12 
typedef struct {
  SEQAIJHEADER(Mat);        /* standard CSR skeleton (i,j,a,ilen,imax,diag,nz,...) with a[] holding Mat entries */
  SEQBAIJHEADER;            /* adds block information (mbs, ...) */
  Mat               *diags; /* factored diagonal blocks; built lazily by the relaxation routines */

  Vec               left,right,middle,workb;   /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
20 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_BlockMat_Symmetric"
/*
   Block SSOR relaxation for a symmetric BlockMat where only the upper
   triangle (with the diagonal block stored first in each row) is kept.

   Restrictions (errors otherwise): omega must be 1.0, fshift must be 0,
   no Eisenstat trick, and a backward sweep may only follow a forward
   sweep.  On first call each diagonal block is Cholesky factored (with
   nested-dissection ordering) and cached in a->diags, and a work vector
   a->workb is created to hold a modifiable copy of the right hand side.

   The initial guess is ignored: xx is zeroed on entry (see TODO below).
*/
PetscErrorCode MatRelax_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap.n,i,mbs = n/A->rmap.bs,j,bs = A->rmap.bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right, middle = a->middle;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_ERR_SUP,"No support yet for fshift");
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
    SETERRQ(PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");

  /* lazily Cholesky factor each diagonal block; results are cached for later calls */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      ierr = MatCholeskyFactorSymbolic(a->a[a->diag[i]],row,&info,a->diags+i);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->a[a->diag[i]],&info,a->diags+i);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag    = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  ierr = VecGetArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatRelax_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        /* skip the diagonal block (first in the row): n, idx, v cover only
           the strictly upper triangular blocks of block row i */
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        /* left = sum_j U(i,j)*x(j); right/middle alias slices of x and b via VecPlaceArray */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);              /* left = b(i) - left */
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);          /* x(i) = D(i)^{-1}(b(i) - U x) */

        /* now adjust right hand side, see MatRelax_SeqSBAIJ */
        for (j=0; j<n; j++) {
          /* MatMultTranspose accounts for the unstored lower triangular blocks */
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        /* b was already adjusted for the lower triangle by the forward sweep */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
127 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_BlockMat"
/*
   Block SOR relaxation for a (nonsymmetric) BlockMat where all blocks of
   each row are stored.

   Restrictions (errors otherwise): omega must be 1.0, fshift must be 0,
   and no Eisenstat trick.  On first call each diagonal block is LU
   factored (with nested-dissection ordering) and cached in a->diags.

   The initial guess is ignored: xx is zeroed on entry (see TODO below).
*/
PetscErrorCode MatRelax_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap.n,i,mbs = n/A->rmap.bs,j,bs = A->rmap.bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_ERR_SUP,"No support yet for fshift");

  /* lazily LU factor each diagonal block; results are cached for later calls */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(a->a[a->diag[i]],row,col,&info,a->diags+i);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->a[a->diag[i]],&info,a->diags+i);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatRelax_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        /* n, idx, v cover all stored blocks of block row i (incl. diagonal) */
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        /* left = sum of off-diagonal blocks times current x */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {            /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);             /* left = b(i) - left */
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);         /* x(i) = D(i)^{-1}(b(i) - off-diag part) */
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {            /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
224 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts scalar values into a BlockMat: scalar row/column indices are
   mapped to a block (brow,bcol) plus an offset (ridx,cidx) within that
   block.  The block's position in the compressed row is located by the
   same binary-search/insertion scheme used by MatSetValues_SeqAIJ; the
   bs x bs submatrix itself is created lazily the first time its block
   position is touched.  For a symmetric matrix, entries below the block
   diagonal are silently dropped.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap.bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscTruth     roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;          /* negative rows are ignored (allows "masked" insertion) */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap.N-1);
#endif
    rp   = aj + ai[brow];           /* block column indices of block row brow */
    ap   = aa + ai[brow];           /* Mat entries of block row brow */
    rmax = imax[brow];              /* allocated slots in this block row */
    nrow = ailen[brow];             /* slots currently in use */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap.n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap.n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue;   /* only upper triangle stored when symmetric */
      ridx = row % bs; cidx = col % bs;            /* position within the bs x bs block */
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      /* monotone column sequences let us reuse the previous search window */
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      /* binary search narrowed to <= 7 slots, then linear scan */
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;           /* block already present at slot i */
        }
      }
      if (nonew == 1) goto noinsert1;
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* grows the storage (CHUNKSIZE at a time) if this row is full */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;          /* freshly opened slot holds no Mat yet */
      rp[i]           = bcol;
      a->nz++;
      noinsert1:;
      /* create the bs x bs submatrix on first touch of this block slot */
      if (!*(ap+i)) {
        if (A->symmetric && brow == bcol) {
          /* don't use SBAIJ since want to reorder in sparse factorization */
          ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
        } else {
          ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
        }
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low = i;                      /* next column search can start at this slot */
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
308 
309 #undef __FUNCT__
310 #define __FUNCT__ "MatLoad_BlockMat"
311 PetscErrorCode MatLoad_BlockMat(PetscViewer viewer, MatType type,Mat *A)
312 {
313   PetscErrorCode    ierr;
314   Mat               tmpA;
315   PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
316   const PetscInt    *cols;
317   const PetscScalar *values;
318   PetscTruth        flg,notdone;
319   Mat_SeqAIJ        *a;
320   Mat_BlockMat      *amat;
321 
322   PetscFunctionBegin;
323   ierr = MatLoad_SeqAIJ(viewer,MATSEQAIJ,&tmpA);CHKERRQ(ierr);
324 
325   ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
326   ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
327     ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
328     ierr = PetscOptionsName("-matload_symmetric","Store the matrix as symmetric","MatLoad",&flg);CHKERRQ(ierr);
329   ierr = PetscOptionsEnd();CHKERRQ(ierr);
330 
331   /* Determine number of nonzero blocks for each block row */
332   a    = (Mat_SeqAIJ*) tmpA->data;
333   mbs  = m/bs;
334   ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
335   ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);
336 
337   for (i=0; i<mbs; i++) {
338     for (j=0; j<bs; j++) {
339       ii[j]         = a->j + a->i[i*bs + j];
340       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
341     }
342 
343     currentcol = -1;
344     notdone = PETSC_TRUE;
345     while (PETSC_TRUE) {
346       notdone = PETSC_FALSE;
347       nextcol = 1000000000;
348       for (j=0; j<bs; j++) {
349         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
350           ii[j]++;
351           ilens[j]--;
352         }
353         if (ilens[j] > 0) {
354           notdone = PETSC_TRUE;
355           nextcol = PetscMin(nextcol,ii[j][0]/bs);
356         }
357       }
358       if (!notdone) break;
359       if (!flg || (nextcol >= i)) lens[i]++;
360       currentcol = nextcol;
361     }
362   }
363 
364   ierr = MatCreateBlockMat(PETSC_COMM_SELF,m,n,bs,0,lens,A);CHKERRQ(ierr);
365   if (flg) {
366     ierr = MatSetOption(*A,MAT_SYMMETRIC);CHKERRQ(ierr);
367   }
368   amat = (Mat_BlockMat*)(*A)->data;
369 
370   /* preallocate the submatrices */
371   ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
372   for (i=0; i<mbs; i++) { /* loops for block rows */
373     for (j=0; j<bs; j++) {
374       ii[j]         = a->j + a->i[i*bs + j];
375       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
376     }
377 
378     currentcol = 1000000000;
379     for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
380       if (ilens[j] > 0) {
381 	currentcol = PetscMin(currentcol,ii[j][0]/bs);
382       }
383     }
384 
385     notdone = PETSC_TRUE;
386     while (PETSC_TRUE) {  /* loops over blocks in block row */
387 
388       notdone = PETSC_FALSE;
389       nextcol = 1000000000;
390       ierr = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
391       for (j=0; j<bs; j++) { /* loop over rows in block */
392         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
393           ii[j]++;
394           ilens[j]--;
395           llens[j]++;
396         }
397         if (ilens[j] > 0) {
398           notdone = PETSC_TRUE;
399           nextcol = PetscMin(nextcol,ii[j][0]/bs);
400         }
401       }
402       if (cnt >= amat->maxnz) SETERRQ1(PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
403       if (!flg || currentcol >= i) {
404         amat->j[cnt] = currentcol;
405         ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
406       }
407 
408       if (!notdone) break;
409       currentcol = nextcol;
410     }
411     amat->ilen[i] = lens[i];
412   }
413   CHKMEMQ;
414 
415   ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
416   ierr = PetscFree(llens);CHKERRQ(ierr);
417 
418   /* copy over the matrix, one row at a time */
419   for (i=0; i<m; i++) {
420     ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
421     ierr = MatSetValues(*A,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
422     ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
423   }
424   ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
425   ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
426   PetscFunctionReturn(0);
427 }
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "MatView_BlockMat"
431 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
432 {
433   Mat_SeqAIJ        *a = (Mat_SeqAIJ*)A->data;
434   PetscErrorCode    ierr;
435   const char        *name;
436   PetscViewerFormat format;
437 
438   PetscFunctionBegin;
439   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
440   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
441   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
442     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
443   }
444   PetscFunctionReturn(0);
445 }
446 
447 #undef __FUNCT__
448 #define __FUNCT__ "MatDestroy_BlockMat"
449 PetscErrorCode MatDestroy_BlockMat(Mat mat)
450 {
451   PetscErrorCode ierr;
452   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
453   PetscInt       i;
454 
455   PetscFunctionBegin;
456   if (bmat->right) {
457     ierr = VecDestroy(bmat->right);CHKERRQ(ierr);
458   }
459   if (bmat->left) {
460     ierr = VecDestroy(bmat->left);CHKERRQ(ierr);
461   }
462   if (bmat->middle) {
463     ierr = VecDestroy(bmat->middle);CHKERRQ(ierr);
464   }
465   if (bmat->workb) {
466     ierr = VecDestroy(bmat->workb);CHKERRQ(ierr);
467   }
468   if (bmat->diags) {
469     for (i=0; i<mat->rmap.n/mat->rmap.bs; i++) {
470       if (bmat->diags[i]) {ierr = MatDestroy(bmat->diags[i]);CHKERRQ(ierr);}
471     }
472   }
473   if (bmat->a) {
474     for (i=0; i<bmat->nz; i++) {
475       if (bmat->a[i]) {ierr = MatDestroy(bmat->a[i]);CHKERRQ(ierr);}
476     }
477   }
478   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
479   ierr = PetscFree(bmat);CHKERRQ(ierr);
480   PetscFunctionReturn(0);
481 }
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "MatMult_BlockMat"
485 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
486 {
487   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
488   PetscErrorCode ierr;
489   PetscScalar    *xx,*yy;
490   PetscInt       *aj,i,*ii,jrow,m = A->rmap.n/A->rmap.bs,bs = A->rmap.bs,n,j;
491   Mat            *aa;
492 
493   PetscFunctionBegin;
494   CHKMEMQ;
495   /*
496      Standard CSR multiply except each entry is a Mat
497   */
498   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
499 
500   ierr = VecSet(y,0.0);CHKERRQ(ierr);
501   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
502   aj  = bmat->j;
503   aa  = bmat->a;
504   ii  = bmat->i;
505   for (i=0; i<m; i++) {
506     jrow = ii[i];
507     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
508     n    = ii[i+1] - jrow;
509     for (j=0; j<n; j++) {
510       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
511       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
512       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
513       jrow++;
514     }
515     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
516   }
517   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
518   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
519   CHKMEMQ;
520   PetscFunctionReturn(0);
521 }
522 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A*x for a symmetric BlockMat storing only the upper triangle
   (diagonal block first in each row).  Each stored off-diagonal block
   contributes twice: once directly to y(i) and once transposed to
   y(column), so one CSR pass covers both triangles.
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap.n/A->rmap.bs,bs = A->rmap.bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj  = bmat->j;
  aa  = bmat->a;
  ii  = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);    /* left aliases y(i) */
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);  /* middle aliases x(i) */
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      /* diagonal block contributes only once */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
576 
577 #undef __FUNCT__
578 #define __FUNCT__ "MatMultAdd_BlockMat"
579 PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
580 {
581   PetscFunctionBegin;
582   PetscFunctionReturn(0);
583 }
584 
585 #undef __FUNCT__
586 #define __FUNCT__ "MatMultTranspose_BlockMat"
587 PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
588 {
589   PetscFunctionBegin;
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "MatMultTransposeAdd_BlockMat"
595 PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
596 {
597   PetscFunctionBegin;
598   PetscFunctionReturn(0);
599 }
600 
601 /*
602      Adds diagonal pointers to sparse matrix structure.
603 */
604 #undef __FUNCT__
605 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
606 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
607 {
608   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
609   PetscErrorCode ierr;
610   PetscInt       i,j,mbs = A->rmap.n/A->rmap.bs;
611 
612   PetscFunctionBegin;
613   if (!a->diag) {
614     ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
615   }
616   for (i=0; i<mbs; i++) {
617     a->diag[i] = a->i[i+1];
618     for (j=a->i[i]; j<a->i[i+1]; j++) {
619       if (a->j[j] == i) {
620         a->diag[i] = j;
621         break;
622       }
623     }
624   }
625   PetscFunctionReturn(0);
626 }
627 
628 #undef __FUNCT__
629 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
630 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,PetscInt csize,MatReuse scall,Mat *B)
631 {
632   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
633   Mat_SeqAIJ     *c;
634   PetscErrorCode ierr;
635   PetscInt       i,k,first,step,lensi,nrows,ncols;
636   PetscInt       *j_new,*i_new,*aj = a->j,*ai = a->i,ii,*ailen = a->ilen;
637   PetscScalar    *a_new,value;
638   Mat            C,*aa = a->a;
639   PetscTruth     stride,equal;
640 
641   PetscFunctionBegin;
642   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
643   if (!equal) SETERRQ(PETSC_ERR_SUP,"Only for idential column and row indices");
644   ierr = ISStride(iscol,&stride);CHKERRQ(ierr);
645   if (!stride) SETERRQ(PETSC_ERR_SUP,"Only for stride indices");
646   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
647   if (step != A->rmap.bs) SETERRQ(PETSC_ERR_SUP,"Can only select one entry from each block");
648 
649   ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
650   ncols = nrows;
651 
652   /* create submatrix */
653   if (scall == MAT_REUSE_MATRIX) {
654     PetscInt n_cols,n_rows;
655     C = *B;
656     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
657     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
658     ierr = MatZeroEntries(C);CHKERRQ(ierr);
659   } else {
660     ierr = MatCreate(A->comm,&C);CHKERRQ(ierr);
661     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
662     if (A->symmetric) {
663       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
664     } else {
665       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
666     }
667     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
668     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
669   }
670   c = (Mat_SeqAIJ*)C->data;
671 
672   /* loop over rows inserting into submatrix */
673   a_new    = c->a;
674   j_new    = c->j;
675   i_new    = c->i;
676 
677   for (i=0; i<nrows; i++) {
678     ii    = ai[i];
679     lensi = ailen[i];
680     for (k=0; k<lensi; k++) {
681       *j_new++ = *aj++;
682       ierr     = MatGetValue(*aa++,first,first,value);CHKERRQ(ierr);
683       *a_new++ = value;
684     }
685     i_new[i+1]  = i_new[i] + lensi;
686     c->ilen[i]  = lensi;
687   }
688 
689   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
690   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
691   *B = C;
692   PetscFunctionReturn(0);
693 }
694 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly for a BlockMat: compacts the CSR storage in place by
   shifting each block row back over the unused preallocated slots,
   resets ilen/imax to the final row lengths, then assembles every
   stored submatrix and marks the diagonal block locations.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax   = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i] ;
      ap = aa + ai[i] ;
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];     /* row starts are now packed */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1]; /* total unused slots, reported below */
    ai[m]  = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* assemble every stored submatrix */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap.n/A->cmap.bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  a->reallocs          = 0;
  A->info.nz_unneeded  = (double)fshift;
  a->rmax              = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
752 
753 #undef __FUNCT__
754 #define __FUNCT__ "MatSetOption_BlockMat"
755 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt)
756 {
757   PetscFunctionBegin;
758   if (opt == MAT_SYMMETRIC) {
759     A->ops->relax = MatRelax_BlockMat_Symmetric;
760     A->ops->mult  = MatMult_BlockMat_Symmetric;
761   } else {
762     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
763   }
764   PetscFunctionReturn(0);
765 }
766 
767 
/*
   Function table for MATBLOCKMAT; slots left 0 are unimplemented
   operations.  The numeric comments give the slot index in struct _MatOps.
*/
static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
       0,
       0,
       MatMult_BlockMat,
/* 4*/ MatMultAdd_BlockMat,
       MatMultTranspose_BlockMat,
       MatMultTransposeAdd_BlockMat,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_BlockMat,
       0,
/*15*/ 0,
       0,
       0,
       0,
       0,
/*20*/ 0,
       MatAssemblyEnd_BlockMat,
       0,
       MatSetOption_BlockMat,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       0,
       0,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ MatGetSubMatrix_BlockMat,
       MatDestroy_BlockMat,
       MatView_BlockMat,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       MatLoad_BlockMat,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
867 
868 #undef __FUNCT__
869 #define __FUNCT__ "MatBlockMatSetPreallocation"
870 /*@C
871    MatBlockMatSetPreallocation - For good matrix assembly performance
872    the user should preallocate the matrix storage by setting the parameter nz
873    (or the array nnz).  By setting these parameters accurately, performance
874    during matrix assembly can be increased by more than a factor of 50.
875 
876    Collective on MPI_Comm
877 
878    Input Parameters:
879 +  B - The matrix
880 .  bs - size of each block in matrix
881 .  nz - number of nonzeros per block row (same for all rows)
882 -  nnz - array containing the number of nonzeros in the various block rows
883          (possibly different for each row) or PETSC_NULL
884 
885    Notes:
886      If nnz is given then nz is ignored
887 
888    Specify the preallocated storage with either nz or nnz (not both).
889    Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
890    allocation.  For large problems you MUST preallocate memory or you
891    will get TERRIBLE performance, see the users' manual chapter on matrices.
892 
893    Level: intermediate
894 
895 .seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()
896 
897 @*/
898 PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
899 {
900   PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[]);
901 
902   PetscFunctionBegin;
903   ierr = PetscObjectQueryFunction((PetscObject)B,"MatBlockMatSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
904   if (f) {
905     ierr = (*f)(B,bs,nz,nnz);CHKERRQ(ierr);
906   }
907   PetscFunctionReturn(0);
908 }
909 
910 EXTERN_C_BEGIN
911 #undef __FUNCT__
912 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
913 PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
914 {
915   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
916   PetscErrorCode ierr;
917   PetscInt       i;
918 
919   PetscFunctionBegin;
920   if (bs < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Block size given %D must be great than zero",bs);
921   if (A->rmap.n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap.n);
922   if (A->cmap.n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap.n);
923   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
924   if (nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
925   if (nnz) {
926     for (i=0; i<A->rmap.n/bs; i++) {
927       if (nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
928       if (nnz[i] > A->cmap.n/bs) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap.n/bs);
929     }
930   }
931   A->rmap.bs = A->cmap.bs = bs;
932   bmat->mbs  = A->rmap.n/bs;
933 
934   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
935   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
936   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
937 
938   ierr = PetscMalloc2(A->rmap.n,PetscInt,&bmat->imax,A->rmap.n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
939   if (nnz) {
940     nz = 0;
941     for (i=0; i<A->rmap.n/A->rmap.bs; i++) {
942       bmat->imax[i] = nnz[i];
943       nz           += nnz[i];
944     }
945   } else {
946     SETERRQ(PETSC_ERR_SUP,"Currently requires block row by row preallocation");
947   }
948 
949   /* bmat->ilen will count nonzeros in each row so far. */
950   for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}
951 
952   /* allocate the matrix space */
953   ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap.n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
954   bmat->i[0] = 0;
955   for (i=1; i<bmat->mbs+1; i++) {
956     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
957   }
958   bmat->singlemalloc = PETSC_TRUE;
959   bmat->free_a       = PETSC_TRUE;
960   bmat->free_ij      = PETSC_TRUE;
961 
962   bmat->nz                = 0;
963   bmat->maxnz             = nz;
964   A->info.nz_unneeded  = (double)bmat->maxnz;
965 
966   PetscFunctionReturn(0);
967 }
968 EXTERN_C_END
969 
/*MC
   MATBLOCKMAT - A matrix that is defined by a set of Mat objects representing a sparse block matrix
                 consisting of (usually) sparse blocks.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/
979 
980 EXTERN_C_BEGIN
981 #undef __FUNCT__
982 #define __FUNCT__ "MatCreate_BlockMat"
983 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_BlockMat(Mat A)
984 {
985   Mat_BlockMat   *b;
986   PetscErrorCode ierr;
987 
988   PetscFunctionBegin;
989   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
990   ierr = PetscNew(Mat_BlockMat,&b);CHKERRQ(ierr);
991 
992   A->data = (void*)b;
993 
994   ierr = PetscMapInitialize(A->comm,&A->rmap);CHKERRQ(ierr);
995   ierr = PetscMapInitialize(A->comm,&A->cmap);CHKERRQ(ierr);
996 
997   A->assembled     = PETSC_TRUE;
998   A->preallocated  = PETSC_FALSE;
999   ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1000 
1001   ierr = PetscOptionsBegin(A->comm,A->prefix,"Matrix Option","Mat");CHKERRQ(ierr);
1002   ierr = PetscOptionsEnd();
1003 
1004   ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
1005                                      "MatBlockMatSetPreallocation_BlockMat",
1006                                       MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1007 
1008   PetscFunctionReturn(0);
1009 }
1010 EXTERN_C_END
1011 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix based on sparse Mat storage

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use PETSC_NULL otherwise)


   Output Parameter:
.  A - the matrix

   Level: intermediate

   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* standard create/set-type/preallocate sequence; local sizes m,n are
     given, global sizes are derived (PETSC_DETERMINE) */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1058 
1059 
1060 
1061