xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision a58c3bc3391eee32bc3fd94ac7edeea38fe57aae)
1 #define PETSCMAT_DLL
2 
3 /*
4    This provides a matrix that consists of Mats
5 */
6 
7 #include "include/private/matimpl.h"              /*I "petscmat.h" I*/
8 #include "src/mat/impls/baij/seq/baij.h"    /* use the common AIJ data-structure */
9 #include "petscksp.h"
10 
#define CHUNKSIZE   15

/*
   Data structure for the BlockMat matrix type: CSR (AIJ-style) storage in
   which each stored "entry" is itself a sequential Mat of size bs by bs.
*/
typedef struct {
  SEQAIJHEADER(Mat);   /* common AIJ fields with Mat-valued entries: i,j,a,imax,ilen,nonew,roworiented,nz,diag,rmax,reallocs (see usage below) */
  SEQBAIJHEADER;       /* blocked fields; mbs (number of block rows) is set in MatBlockMatSetPreallocation_BlockMat */
  Mat               *diags;   /* factored diagonal blocks, created lazily by the MatRelax_* routines */

  Vec               left,right,middle,workb;   /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
20 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_BlockMat_Symmetric"
/*
   SSOR-style relaxation for a BlockMat that stores only the upper triangular
   block structure of a symmetric matrix (see MatSetOption_BlockMat, which
   installs this routine when MAT_SYMMETRIC is set).

   Restrictions enforced below: omega == 1.0, fshift == 0, no Eisenstat trick,
   and a backward sweep is only allowed together with a forward sweep (the
   forward sweep builds the modified right hand side the backward sweep uses).

   On first use the Cholesky factors of all diagonal blocks are computed and
   cached in a->diags, and a work vector workb the size of bb is created,
   because the right hand side is modified during the iteration.

   NOTE(review): assumes the diagonal block is stored FIRST in each block row
   (the row traversal below skips entry a->i[i] and MatSolve uses diag[i]) --
   consistent with the upper-triangular symmetric storage built in
   MatLoad_BlockMat; verify for matrices assembled by other paths.
*/
PetscErrorCode MatRelax_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap.n,i,mbs = n/A->rmap.bs,j,bs = A->rmap.bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right, middle = a->middle;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_ERR_SUP,"No support yet for fshift");
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
    SETERRQ(PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");

  /* lazily Cholesky-factor every diagonal block; a->diag[] was set by MatMarkDiagonal_BlockMat */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      ierr = MatCholeskyFactorSymbolic(a->a[a->diag[i]],row,&info,a->diags+i);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->a[a->diag[i]],&info,a->diags+i);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag    = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  ierr = VecGetArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatRelax_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        /* the "+ 1" skips the diagonal block, assumed first in the row */
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        /* left = sum over off-diagonal blocks of v[j] * x(block idx[j]) */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        /* left = b(block i) - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x(block i) = D_i^{-1} left, solved with the cached Cholesky factor */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);

        /* now adjust right hand side, see MatRelax_SeqSBAIJ */
        for (j=0; j<n; j++) {
          /* b(block idx[j]) -= v[j]^T * x(block i); accounts for the unstored lower triangle */
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      /* same update in reverse block-row order; the rhs was already adjusted
         by the forward sweep so no MatMultTranspose correction is needed */
      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
127 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_BlockMat"
/*
   Block SOR relaxation for the (nonsymmetric) BlockMat format.

   Restrictions enforced below: omega == 1.0, fshift == 0, no Eisenstat.

   On first use the LU factors of all diagonal blocks are computed with a
   nested-dissection ordering and cached in a->diags.  Unlike the symmetric
   variant, the full block row is stored, so the diagonal block is skipped
   inside the loops via the idx[j] != i test and bb can be read directly
   (no work copy of the right hand side is needed).
*/
PetscErrorCode MatRelax_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap.n,i,mbs = n/A->rmap.bs,j,bs = A->rmap.bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_ERR_SUP,"No support yet for fshift");

  /* lazily LU-factor every diagonal block; a->diag[] was set by MatMarkDiagonal_BlockMat */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(a->a[a->diag[i]],row,col,&info,a->diags+i);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->a[a->diag[i]],&info,a->diags+i);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatRelax_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        /* left = sum over off-diagonal blocks of v[j] * x(block idx[j]) */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {  /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        /* left = b(block i) - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x(block i) = D_i^{-1} left via the cached LU factor */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      /* identical update sweeping the block rows in reverse order */
      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
224 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts/adds scalar values into a BlockMat.  Each scalar (row,col) pair is
   mapped to a block (brow,bcol) = (row/bs,col/bs); the block's position in
   the compressed row is located (binary search then linear scan), the bs-by-bs
   SeqAIJ block is created on first touch, and the scalar is forwarded to
   MatSetValues() on that block at local indices (row%bs,col%bs).

   For matrices marked symmetric only the upper triangle (brow <= bcol) is kept.
   nonew == 1 silently skips new nonzero blocks; nonew == -1 makes them an error.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap.bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscTruth     roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;   /* negative rows are silently ignored (standard PETSc convention) */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap.N-1);
#endif
    rp   = aj + ai[brow];    /* block-column indices of block row brow */
    ap   = aa + ai[brow];    /* the Mat entries of block row brow */
    rmax = imax[brow];       /* preallocated slots in this block row */
    nrow = ailen[brow];      /* slots currently in use */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (l < 0) continue;
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap.n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap.n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue;   /* only the upper triangle is stored */
      ridx = row % bs; cidx = col % bs;            /* indices within the bs-by-bs block */
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      /* reuse search window from the previous column when columns arrive in order */
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      /* binary search narrows [low,high) to at most 7 slots ... */
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      /* ... then linear scan finds the insertion point or an existing block */
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;
        }
      }
      if (nonew == 1) goto noinsert1;
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* may reallocate the row storage if the preallocation is exhausted */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;   /* new slot: no block Mat exists yet */
      rp[i]           = bcol;
      a->nz++;
      noinsert1:;
      /* create the bs-by-bs block lazily on first touch */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
303 
#undef __FUNCT__
#define __FUNCT__ "MatLoad_BlockMat"
/*
   Loads a BlockMat from a binary viewer by first loading the file as a
   SeqAIJ matrix, then scanning its CSR structure to count and preallocate
   the nonzero blocks, and finally copying the values row by row through
   MatSetValues().

   Options honored:
     -matload_block_size <bs>  block size used to partition the matrix
     -matload_symmetric        store only the upper triangular blocks

   NOTE(review): tmpA (the intermediate SeqAIJ matrix) is never destroyed
   here and `type` is unused -- appears to leak tmpA; confirm against the
   MatLoad calling convention of this PETSc version.
*/
PetscErrorCode MatLoad_BlockMat(PetscViewer viewer, MatType type,Mat *A)
{
  PetscErrorCode    ierr;
  Mat               tmpA;
  PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
  const PetscInt    *cols;
  const PetscScalar *values;
  PetscTruth        flg,notdone;
  Mat_SeqAIJ        *a;
  Mat_BlockMat      *amat;

  PetscFunctionBegin;
  ierr = MatLoad_SeqAIJ(viewer,MATSEQAIJ,&tmpA);CHKERRQ(ierr);

  ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsName("-matload_symmetric","Store the matrix as symmetric","MatLoad",&flg);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* Determine number of nonzero blocks for each block row */
  a    = (Mat_SeqAIJ*) tmpA->data;
  mbs  = m/bs;
  ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
  ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<mbs; i++) {
    /* ii[j]/ilens[j] walk the j-th scalar row within block row i */
    for (j=0; j<bs; j++) {
      ii[j]         = a->j + a->i[i*bs + j];
      ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    /* merge-scan the bs rows in increasing block-column order, counting
       each distinct block column once */
    currentcol = -1;
    notdone = PETSC_TRUE;
    while (PETSC_TRUE) {
      notdone = PETSC_FALSE;
      nextcol = 1000000000;   /* sentinel larger than any block column */
      for (j=0; j<bs; j++) {
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {   /* consume entries in the current block column */
          ii[j]++;
          ilens[j]--;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (!notdone) break;
      /* for symmetric storage only count upper-triangular blocks (nextcol >= i) */
      if (!flg || (nextcol >= i)) lens[i]++;
      currentcol = nextcol;
    }
  }

  ierr = MatCreateBlockMat(PETSC_COMM_SELF,m,n,bs,0,lens,A);CHKERRQ(ierr);
  if (flg) {
    ierr = MatSetOption(*A,MAT_SYMMETRIC);CHKERRQ(ierr);
  }
  amat = (Mat_BlockMat*)(*A)->data;

  /* preallocate the submatrices */
  ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
  for (i=0; i<mbs; i++) { /* loops for block rows */
    for (j=0; j<bs; j++) {
      ii[j]         = a->j + a->i[i*bs + j];
      ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = 1000000000;
    for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
      if (ilens[j] > 0) {
        currentcol = PetscMin(currentcol,ii[j][0]/bs);
      }
    }

    notdone = PETSC_TRUE;
    while (PETSC_TRUE) {  /* loops over blocks in block row */

      notdone = PETSC_FALSE;
      nextcol = 1000000000;
      ierr = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
      for (j=0; j<bs; j++) { /* loop over rows in block */
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
          ii[j]++;
          ilens[j]--;
          llens[j]++;   /* per-row nonzero count inside the current block */
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (cnt >= amat->maxnz) SETERRQ1(PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
      if (!flg || currentcol >= i) {
        /* record the block-column index and create the bs-by-bs block with exact preallocation */
        amat->j[cnt] = currentcol;
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
      }

      if (!notdone) break;
      currentcol = nextcol;
    }
    amat->ilen[i] = lens[i];
  }
  CHKMEMQ;

  ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
  ierr = PetscFree(llens);CHKERRQ(ierr);

  /* copy over the matrix, one row at a time */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
    ierr = MatSetValues(*A,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
423 
424 #undef __FUNCT__
425 #define __FUNCT__ "MatView_BlockMat"
426 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
427 {
428   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
429   PetscErrorCode    ierr;
430   const char        *name;
431   PetscViewerFormat format;
432 
433   PetscFunctionBegin;
434   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
435   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
436   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
437     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
438     if (A->symmetric) {
439       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
440     }
441   }
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "MatDestroy_BlockMat"
447 PetscErrorCode MatDestroy_BlockMat(Mat mat)
448 {
449   PetscErrorCode ierr;
450   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
451   PetscInt       i;
452 
453   PetscFunctionBegin;
454   if (bmat->right) {
455     ierr = VecDestroy(bmat->right);CHKERRQ(ierr);
456   }
457   if (bmat->left) {
458     ierr = VecDestroy(bmat->left);CHKERRQ(ierr);
459   }
460   if (bmat->middle) {
461     ierr = VecDestroy(bmat->middle);CHKERRQ(ierr);
462   }
463   if (bmat->workb) {
464     ierr = VecDestroy(bmat->workb);CHKERRQ(ierr);
465   }
466   if (bmat->diags) {
467     for (i=0; i<mat->rmap.n/mat->rmap.bs; i++) {
468       if (bmat->diags[i]) {ierr = MatDestroy(bmat->diags[i]);CHKERRQ(ierr);}
469     }
470   }
471   if (bmat->a) {
472     for (i=0; i<bmat->nz; i++) {
473       if (bmat->a[i]) {ierr = MatDestroy(bmat->a[i]);CHKERRQ(ierr);}
474     }
475   }
476   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
477   ierr = PetscFree(bmat);CHKERRQ(ierr);
478   PetscFunctionReturn(0);
479 }
480 
481 #undef __FUNCT__
482 #define __FUNCT__ "MatMult_BlockMat"
483 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
484 {
485   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
486   PetscErrorCode ierr;
487   PetscScalar    *xx,*yy;
488   PetscInt       *aj,i,*ii,jrow,m = A->rmap.n/A->rmap.bs,bs = A->rmap.bs,n,j;
489   Mat            *aa;
490 
491   PetscFunctionBegin;
492   CHKMEMQ;
493   /*
494      Standard CSR multiply except each entry is a Mat
495   */
496   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
497 
498   ierr = VecSet(y,0.0);CHKERRQ(ierr);
499   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
500   aj  = bmat->j;
501   aa  = bmat->a;
502   ii  = bmat->i;
503   for (i=0; i<m; i++) {
504     jrow = ii[i];
505     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
506     n    = ii[i+1] - jrow;
507     for (j=0; j<n; j++) {
508       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
509       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
510       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
511       jrow++;
512     }
513     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
514   }
515   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
516   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
517   CHKMEMQ;
518   PetscFunctionReturn(0);
519 }
520 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A x when only the upper triangular blocks of a symmetric BlockMat are
   stored.  Each stored off-diagonal block v contributes twice: v * x(col)
   into y(row) (upper part) and v^T * x(row) into y(col) (implicit lower
   part).  Installed by MatSetOption_BlockMat when MAT_SYMMETRIC is set.
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap.n/A->rmap.bs,bs = A->rmap.bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj  = bmat->j;
  aa  = bmat->a;
  ii  = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);    /* y slice of block row i */
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);  /* x slice of block row i, used for transpose updates */
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      /* diagonal block contributes only once */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
574 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/* NOTE(review): unimplemented stub -- silently does nothing, z is never
   computed.  Callers expecting z = A*x + y get no result; consider raising
   SETERRQ(PETSC_ERR_SUP,...) instead of returning success. */
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
582 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/* NOTE(review): unimplemented stub -- silently does nothing, y is never
   computed.  Consider raising SETERRQ(PETSC_ERR_SUP,...) instead. */
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
590 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/* NOTE(review): unimplemented stub -- silently does nothing, z is never
   computed.  Consider raising SETERRQ(PETSC_ERR_SUP,...) instead. */
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
598 
599 /*
600      Adds diagonal pointers to sparse matrix structure.
601 */
602 #undef __FUNCT__
603 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
604 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
605 {
606   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
607   PetscErrorCode ierr;
608   PetscInt       i,j,mbs = A->rmap.n/A->rmap.bs;
609 
610   PetscFunctionBegin;
611   if (!a->diag) {
612     ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
613   }
614   for (i=0; i<mbs; i++) {
615     a->diag[i] = a->i[i+1];
616     for (j=a->i[i]; j<a->i[i+1]; j++) {
617       if (a->j[j] == i) {
618         a->diag[i] = j;
619         break;
620       }
621     }
622   }
623   PetscFunctionReturn(0);
624 }
625 
626 #undef __FUNCT__
627 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
628 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,PetscInt csize,MatReuse scall,Mat *B)
629 {
630   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
631   Mat_SeqAIJ     *c;
632   PetscErrorCode ierr;
633   PetscInt       i,k,first,step,lensi,nrows,ncols;
634   PetscInt       *j_new,*i_new,*aj = a->j,*ai = a->i,ii,*ailen = a->ilen;
635   PetscScalar    *a_new,value;
636   Mat            C,*aa = a->a;
637   PetscTruth     stride,equal;
638 
639   PetscFunctionBegin;
640   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
641   if (!equal) SETERRQ(PETSC_ERR_SUP,"Only for idential column and row indices");
642   ierr = ISStride(iscol,&stride);CHKERRQ(ierr);
643   if (!stride) SETERRQ(PETSC_ERR_SUP,"Only for stride indices");
644   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
645   if (step != A->rmap.bs) SETERRQ(PETSC_ERR_SUP,"Can only select one entry from each block");
646 
647   ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
648   ncols = nrows;
649 
650   /* create submatrix */
651   if (scall == MAT_REUSE_MATRIX) {
652     PetscInt n_cols,n_rows;
653     C = *B;
654     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
655     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
656     ierr = MatZeroEntries(C);CHKERRQ(ierr);
657   } else {
658     ierr = MatCreate(A->comm,&C);CHKERRQ(ierr);
659     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
660     if (A->symmetric) {
661       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
662     } else {
663       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
664     }
665     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
666     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
667   }
668   c = (Mat_SeqAIJ*)C->data;
669 
670   /* loop over rows inserting into submatrix */
671   a_new    = c->a;
672   j_new    = c->j;
673   i_new    = c->i;
674 
675   for (i=0; i<nrows; i++) {
676     ii    = ai[i];
677     lensi = ailen[i];
678     for (k=0; k<lensi; k++) {
679       *j_new++ = *aj++;
680       ierr     = MatGetValue(*aa++,first,first,value);CHKERRQ(ierr);
681       *a_new++ = value;
682     }
683     i_new[i+1]  = i_new[i] + lensi;
684     c->ilen[i]  = lensi;
685   }
686 
687   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
688   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
689   *B = C;
690   PetscFunctionReturn(0);
691 }
692 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly for a BlockMat: compacts the CSR arrays in place by
   removing the unused preallocated slots between rows, updates the row
   pointers and per-row lengths, assembles every nonzero block Mat, and
   records the diagonal block positions.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);  /* nothing to do for a flush */

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax   = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i] ;
      ap = aa + ai[i] ;
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];  /* row pointer now reflects the packed layout */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]  = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* finish assembly of each stored block matrix */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap.n/A->cmap.bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  a->reallocs          = 0;
  A->info.nz_unneeded  = (double)fshift;
  a->rmax              = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
750 
751 #undef __FUNCT__
752 #define __FUNCT__ "MatSetOption_BlockMat"
753 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt)
754 {
755   PetscFunctionBegin;
756   if (opt == MAT_SYMMETRIC) {
757     A->ops->relax = MatRelax_BlockMat_Symmetric;
758     A->ops->mult  = MatMult_BlockMat_Symmetric;
759   } else {
760     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
761   }
762   PetscFunctionReturn(0);
763 }
764 
765 
/* Function table for the BlockMat type; slot numbers follow the _MatOps
   layout (a 0 means the operation is not supported). */
static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
       0,
       0,
       MatMult_BlockMat,
/* 4*/ MatMultAdd_BlockMat,
       MatMultTranspose_BlockMat,
       MatMultTransposeAdd_BlockMat,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_BlockMat,
       0,
/*15*/ 0,
       0,
       0,
       0,
       0,
/*20*/ 0,
       MatAssemblyEnd_BlockMat,
       0,
       MatSetOption_BlockMat,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       0,
       0,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ MatGetSubMatrix_BlockMat,
       MatDestroy_BlockMat,
       MatView_BlockMat,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       MatLoad_BlockMat,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
865 
866 #undef __FUNCT__
867 #define __FUNCT__ "MatBlockMatSetPreallocation"
868 /*@C
869    MatBlockMatSetPreallocation - For good matrix assembly performance
870    the user should preallocate the matrix storage by setting the parameter nz
871    (or the array nnz).  By setting these parameters accurately, performance
872    during matrix assembly can be increased by more than a factor of 50.
873 
874    Collective on MPI_Comm
875 
876    Input Parameters:
877 +  B - The matrix
878 .  bs - size of each block in matrix
879 .  nz - number of nonzeros per block row (same for all rows)
880 -  nnz - array containing the number of nonzeros in the various block rows
881          (possibly different for each row) or PETSC_NULL
882 
883    Notes:
884      If nnz is given then nz is ignored
885 
886    Specify the preallocated storage with either nz or nnz (not both).
887    Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
888    allocation.  For large problems you MUST preallocate memory or you
889    will get TERRIBLE performance, see the users' manual chapter on matrices.
890 
891    Level: intermediate
892 
893 .seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()
894 
895 @*/
896 PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
897 {
898   PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[]);
899 
900   PetscFunctionBegin;
901   ierr = PetscObjectQueryFunction((PetscObject)B,"MatBlockMatSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
902   if (f) {
903     ierr = (*f)(B,bs,nz,nnz);CHKERRQ(ierr);
904   }
905   PetscFunctionReturn(0);
906 }
907 
908 EXTERN_C_BEGIN
909 #undef __FUNCT__
910 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
911 PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
912 {
913   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
914   PetscErrorCode ierr;
915   PetscInt       i;
916 
917   PetscFunctionBegin;
918   if (bs < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Block size given %D must be great than zero",bs);
919   if (A->rmap.n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap.n);
920   if (A->cmap.n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap.n);
921   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
922   if (nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
923   if (nnz) {
924     for (i=0; i<A->rmap.n/bs; i++) {
925       if (nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
926       if (nnz[i] > A->cmap.n/bs) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap.n/bs);
927     }
928   }
929   A->rmap.bs = A->cmap.bs = bs;
930   bmat->mbs  = A->rmap.n/bs;
931 
932   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
933   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
934   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
935 
936   ierr = PetscMalloc2(A->rmap.n,PetscInt,&bmat->imax,A->rmap.n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
937   if (nnz) {
938     nz = 0;
939     for (i=0; i<A->rmap.n/A->rmap.bs; i++) {
940       bmat->imax[i] = nnz[i];
941       nz           += nnz[i];
942     }
943   } else {
944     SETERRQ(PETSC_ERR_SUP,"Currently requires block row by row preallocation");
945   }
946 
947   /* bmat->ilen will count nonzeros in each row so far. */
948   for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}
949 
950   /* allocate the matrix space */
951   ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap.n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
952   bmat->i[0] = 0;
953   for (i=1; i<bmat->mbs+1; i++) {
954     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
955   }
956   bmat->singlemalloc = PETSC_TRUE;
957   bmat->free_a       = PETSC_TRUE;
958   bmat->free_ij      = PETSC_TRUE;
959 
960   bmat->nz                = 0;
961   bmat->maxnz             = nz;
962   A->info.nz_unneeded  = (double)bmat->maxnz;
963 
964   PetscFunctionReturn(0);
965 }
966 EXTERN_C_END
967 
/*MC
   MATBLOCKMAT - A matrix that is defined by a set of Mat objects that represent a sparse block matrix
                 consisting of (usually) sparse blocks.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/
977 
978 EXTERN_C_BEGIN
979 #undef __FUNCT__
980 #define __FUNCT__ "MatCreate_BlockMat"
981 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_BlockMat(Mat A)
982 {
983   Mat_BlockMat   *b;
984   PetscErrorCode ierr;
985 
986   PetscFunctionBegin;
987   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
988   ierr = PetscNew(Mat_BlockMat,&b);CHKERRQ(ierr);
989 
990   A->data = (void*)b;
991 
992   ierr = PetscMapSetBlockSize(&A->rmap,1);CHKERRQ(ierr);
993   ierr = PetscMapSetBlockSize(&A->cmap,1);CHKERRQ(ierr);
994   ierr = PetscMapSetUp(&A->rmap);CHKERRQ(ierr);
995   ierr = PetscMapSetUp(&A->cmap);CHKERRQ(ierr);
996 
997   A->assembled     = PETSC_TRUE;
998   A->preallocated  = PETSC_FALSE;
999   ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1000 
1001   ierr = PetscOptionsBegin(A->comm,A->prefix,"Matrix Option","Mat");CHKERRQ(ierr);
1002   ierr = PetscOptionsEnd();
1003 
1004   ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
1005                                      "MatBlockMatSetPreallocation_BlockMat",
1006                                       MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1007 
1008   PetscFunctionReturn(0);
1009 }
1010 EXTERN_C_END
1011 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix based on sparse Mat storage

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use PETSC_NULL otherwise)


   Output Parameter:
.  A - the matrix

   Level: intermediate

   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Standard PETSc construction order: create, set sizes, set type, preallocate */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1058 
1059 
1060 
1061