xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 7d0a6c19129e7069c8a40e210b34ed62989173db)
1 
2 /*
3    This provides a matrix that consists of Mats
4 */
5 
6 #include "private/matimpl.h"              /*I "petscmat.h" I*/
7 #include "../src/mat/impls/baij/seq/baij.h"    /* use the common AIJ data-structure */
8 #include "petscksp.h"
9 
typedef struct {
  SEQAIJHEADER(Mat);                           /* CSR-style storage (i,j,ilen,imax,...) where each stored "entry" is itself a Mat */
  SEQBAIJHEADER;                               /* block-size related fields (mbs etc.) shared with BAIJ formats */
  Mat               *diags;                    /* factored diagonal blocks; built lazily on first MatSOR call */

  Vec               left,right,middle,workb;   /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
17 
18 extern PetscErrorCode  MatBlockMatSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*);
19 
#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat_Symmetric"
/*
   SSOR relaxation for a BlockMat that stores only the upper-triangular blocks
   of a symmetric matrix.  Each diagonal block is Cholesky factored lazily on
   the first call and reused afterwards (cached in a->diags).

   Supported options are restricted: omega must be 1.0, fshift must be 0.0,
   Eisenstat is not supported, and a backward sweep requires a forward sweep.
*/
PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right, middle = a->middle;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");

  /* Lazily Cholesky-factor each diagonal block on first use.
     NOTE(review): a->diags[i] is passed to MatCholeskyFactorSymbolic() straight
     out of PetscMalloc() without an explicit creation/MatGetFactor() call --
     confirm the symbolic routine creates the factor matrix in this PETSc version. */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag    = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  /* NOTE(review): b is obtained with VecGetArrayRead() yet workb is written
     through the 'middle' vector placed over it below -- VecGetArray() would
     express the intent more honestly; confirm before changing. */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  ierr = VecGetArrayRead(a->workb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        /* skip the diagonal block, which is always the first stored entry of the row */
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        /* left = sum over off-diagonal blocks of  A(i,j) * x(j) */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        /* left = b(i) - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x(i) = inv(D(i)) * left, solved with the cached Cholesky factor */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);

        /* now adjust right hand side, see MatSOR_SeqSBAIJ */
        for (j=0; j<n; j++) {
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      /* backward sweep: same recurrence but rows visited in reverse order;
         no right-hand-side adjustment is needed here because the forward
         sweep already folded the transpose contributions into workb */
      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(a->workb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
126 
#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat"
/*
   SOR relaxation for the (non-symmetric) BlockMat format.  Each diagonal
   block is LU factored lazily on the first call and cached in a->diags.

   Supported options are restricted: omega must be 1.0, fshift must be 0.0,
   and the Eisenstat trick is not supported.
*/
PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");

  /* Lazily LU-factor each diagonal block on first use.
     NOTE(review): a->diags[i] is handed to MatLUFactorSymbolic() straight out
     of PetscMalloc() without an explicit creation/MatGetFactor() call --
     confirm the symbolic routine creates the factor matrix in this PETSc version. */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        /* full block row; the diagonal block is skipped inside the loop below */
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        /* left = sum over off-diagonal blocks of  A(i,j) * x(j) */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        /* left = b(i) - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x(i) = inv(D(i)) * left, solved with the cached LU factor */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      /* backward sweep: identical recurrence, rows visited in reverse order */
      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
223 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts or adds scalar values into the BlockMat.  Global scalar indices
   (im[], in[]) are mapped to a block (brow,bcol) plus an offset (ridx,cidx)
   within that block; a small SeqAIJ matrix is created on demand for each
   nonzero block and the value is forwarded to MatSetValues() on it.

   Structure mirrors MatSetValues_SeqAIJ: per row, a bounded binary search
   locates the column block, and new blocks are spliced into the sorted
   compressed row (reallocating via MatSeqXAIJReallocateAIJ when full).
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscBool      roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;   /* negative rows are silently ignored (standard PETSc convention) */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];     /* column-block indices of block row brow */
    ap   = aa + ai[brow];     /* submatrix pointers of block row brow */
    rmax = imax[brow];        /* allocated slots in this block row */
    nrow = ailen[brow];       /* currently used slots */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      /* symmetric storage keeps only the upper triangle of block rows/cols */
      if (A->symmetric && brow > bcol) continue;
      ridx = row % bs; cidx = col % bs;
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      /* reuse the previous search window when columns arrive in sorted order */
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      /* binary search narrowed to a window of <= 7, then linear scan */
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;   /* block already exists; just add the value into it */
        }
      }
      if (nonew == 1) goto noinsert1;
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;    /* freshly opened slot: no submatrix created yet */
      rp[i]           = bcol;
      a->nz++;
      noinsert1:;
      /* create the block submatrix lazily on first touch */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
303 
304 #undef __FUNCT__
305 #define __FUNCT__ "MatLoad_BlockMat"
306 PetscErrorCode MatLoad_BlockMat(Mat newmat, PetscViewer viewer)
307 {
308   PetscErrorCode    ierr;
309   Mat               tmpA;
310   PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
311   const PetscInt    *cols;
312   const PetscScalar *values;
313   PetscBool         flg = PETSC_FALSE,notdone;
314   Mat_SeqAIJ        *a;
315   Mat_BlockMat      *amat;
316 
317   PetscFunctionBegin;
318   ierr = MatCreate(PETSC_COMM_SELF,&tmpA);CHKERRQ(ierr);
319   ierr = MatSetType(tmpA,MATSEQAIJ);CHKERRQ(ierr);
320   ierr = MatLoad_SeqAIJ(tmpA,viewer);CHKERRQ(ierr);
321 
322   ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
323   ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
324   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
325   ierr = PetscOptionsBool("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
326   ierr = PetscOptionsEnd();CHKERRQ(ierr);
327 
328   /* Determine number of nonzero blocks for each block row */
329   a    = (Mat_SeqAIJ*) tmpA->data;
330   mbs  = m/bs;
331   ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
332   ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);
333 
334   for (i=0; i<mbs; i++) {
335     for (j=0; j<bs; j++) {
336       ii[j]         = a->j + a->i[i*bs + j];
337       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
338     }
339 
340     currentcol = -1;
341     notdone = PETSC_TRUE;
342     while (PETSC_TRUE) {
343       notdone = PETSC_FALSE;
344       nextcol = 1000000000;
345       for (j=0; j<bs; j++) {
346         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
347           ii[j]++;
348           ilens[j]--;
349         }
350         if (ilens[j] > 0) {
351           notdone = PETSC_TRUE;
352           nextcol = PetscMin(nextcol,ii[j][0]/bs);
353         }
354       }
355       if (!notdone) break;
356       if (!flg || (nextcol >= i)) lens[i]++;
357       currentcol = nextcol;
358     }
359   }
360 
361   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) {
362     ierr = MatSetSizes(newmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
363   }
364   ierr = MatBlockMatSetPreallocation(newmat,bs,0,lens);CHKERRQ(ierr);
365   if (flg) {
366     ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
367   }
368   amat = (Mat_BlockMat*)(newmat)->data;
369 
370   /* preallocate the submatrices */
371   ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
372   for (i=0; i<mbs; i++) { /* loops for block rows */
373     for (j=0; j<bs; j++) {
374       ii[j]         = a->j + a->i[i*bs + j];
375       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
376     }
377 
378     currentcol = 1000000000;
379     for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
380       if (ilens[j] > 0) {
381 	currentcol = PetscMin(currentcol,ii[j][0]/bs);
382       }
383     }
384 
385     notdone = PETSC_TRUE;
386     while (PETSC_TRUE) {  /* loops over blocks in block row */
387 
388       notdone = PETSC_FALSE;
389       nextcol = 1000000000;
390       ierr = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
391       for (j=0; j<bs; j++) { /* loop over rows in block */
392         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
393           ii[j]++;
394           ilens[j]--;
395           llens[j]++;
396         }
397         if (ilens[j] > 0) {
398           notdone = PETSC_TRUE;
399           nextcol = PetscMin(nextcol,ii[j][0]/bs);
400         }
401       }
402       if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
403       if (!flg || currentcol >= i) {
404         amat->j[cnt] = currentcol;
405         ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
406       }
407 
408       if (!notdone) break;
409       currentcol = nextcol;
410     }
411     amat->ilen[i] = lens[i];
412   }
413   CHKMEMQ;
414 
415   ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
416   ierr = PetscFree(llens);CHKERRQ(ierr);
417 
418   /* copy over the matrix, one row at a time */
419   for (i=0; i<m; i++) {
420     ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
421     ierr = MatSetValues(newmat,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
422     ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
423   }
424   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
425   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
426   PetscFunctionReturn(0);
427 }
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "MatView_BlockMat"
431 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
432 {
433   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
434   PetscErrorCode    ierr;
435   const char        *name;
436   PetscViewerFormat format;
437 
438   PetscFunctionBegin;
439   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
440   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
441   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
442     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
443     if (A->symmetric) {
444       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
445     }
446   }
447   PetscFunctionReturn(0);
448 }
449 
450 #undef __FUNCT__
451 #define __FUNCT__ "MatDestroy_BlockMat"
452 PetscErrorCode MatDestroy_BlockMat(Mat mat)
453 {
454   PetscErrorCode ierr;
455   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
456   PetscInt       i;
457 
458   PetscFunctionBegin;
459   if (bmat->right) {
460     ierr = VecDestroy(bmat->right);CHKERRQ(ierr);
461   }
462   if (bmat->left) {
463     ierr = VecDestroy(bmat->left);CHKERRQ(ierr);
464   }
465   if (bmat->middle) {
466     ierr = VecDestroy(bmat->middle);CHKERRQ(ierr);
467   }
468   if (bmat->workb) {
469     ierr = VecDestroy(bmat->workb);CHKERRQ(ierr);
470   }
471   if (bmat->diags) {
472     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
473       if (bmat->diags[i]) {ierr = MatDestroy(bmat->diags[i]);CHKERRQ(ierr);}
474     }
475   }
476   if (bmat->a) {
477     for (i=0; i<bmat->nz; i++) {
478       if (bmat->a[i]) {ierr = MatDestroy(bmat->a[i]);CHKERRQ(ierr);}
479     }
480   }
481   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
482   ierr = PetscFree(bmat);CHKERRQ(ierr);
483   PetscFunctionReturn(0);
484 }
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "MatMult_BlockMat"
488 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
489 {
490   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
491   PetscErrorCode ierr;
492   PetscScalar    *xx,*yy;
493   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
494   Mat            *aa;
495 
496   PetscFunctionBegin;
497   CHKMEMQ;
498   /*
499      Standard CSR multiply except each entry is a Mat
500   */
501   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
502 
503   ierr = VecSet(y,0.0);CHKERRQ(ierr);
504   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
505   aj  = bmat->j;
506   aa  = bmat->a;
507   ii  = bmat->i;
508   for (i=0; i<m; i++) {
509     jrow = ii[i];
510     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
511     n    = ii[i+1] - jrow;
512     for (j=0; j<n; j++) {
513       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
514       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
515       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
516       jrow++;
517     }
518     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
519   }
520   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
521   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
522   CHKMEMQ;
523   PetscFunctionReturn(0);
524 }
525 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A*x when only the upper triangular blocks are stored: each stored
   block A(i,j) contributes A(i,j)*x(j) to y(i) (upper part) and
   A(i,j)^T * x(i) to y(j) (implicit lower part).  The diagonal block, when
   present, is applied once without the transpose contribution.
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj  = bmat->j;
  aa  = bmat->a;
  ii  = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);    /* left aliases y(i) */
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);  /* middle aliases x(i), reused for all transpose products */
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      /* diagonal block: applied once, no transpose contribution */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "MatMultAdd_BlockMat"
582 PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
583 {
584   PetscFunctionBegin;
585   PetscFunctionReturn(0);
586 }
587 
588 #undef __FUNCT__
589 #define __FUNCT__ "MatMultTranspose_BlockMat"
590 PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
591 {
592   PetscFunctionBegin;
593   PetscFunctionReturn(0);
594 }
595 
596 #undef __FUNCT__
597 #define __FUNCT__ "MatMultTransposeAdd_BlockMat"
598 PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
599 {
600   PetscFunctionBegin;
601   PetscFunctionReturn(0);
602 }
603 
604 /*
605      Adds diagonal pointers to sparse matrix structure.
606 */
607 #undef __FUNCT__
608 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
609 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
610 {
611   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
612   PetscErrorCode ierr;
613   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
614 
615   PetscFunctionBegin;
616   if (!a->diag) {
617     ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
618   }
619   for (i=0; i<mbs; i++) {
620     a->diag[i] = a->i[i+1];
621     for (j=a->i[i]; j<a->i[i+1]; j++) {
622       if (a->j[j] == i) {
623         a->diag[i] = j;
624         break;
625       }
626     }
627   }
628   PetscFunctionReturn(0);
629 }
630 
631 #undef __FUNCT__
632 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
633 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
634 {
635   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
636   Mat_SeqAIJ     *c;
637   PetscErrorCode ierr;
638   PetscInt       i,k,first,step,lensi,nrows,ncols;
639   PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
640   PetscScalar    *a_new;
641   Mat            C,*aa = a->a;
642   PetscBool      stride,equal;
643 
644   PetscFunctionBegin;
645   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
646   if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for idential column and row indices");
647   ierr = PetscTypeCompare((PetscObject)iscol,ISSTRIDE,&stride);CHKERRQ(ierr);
648   if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
649   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
650   if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");
651 
652   ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
653   ncols = nrows;
654 
655   /* create submatrix */
656   if (scall == MAT_REUSE_MATRIX) {
657     PetscInt n_cols,n_rows;
658     C = *B;
659     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
660     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
661     ierr = MatZeroEntries(C);CHKERRQ(ierr);
662   } else {
663     ierr = MatCreate(((PetscObject)A)->comm,&C);CHKERRQ(ierr);
664     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
665     if (A->symmetric) {
666       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
667     } else {
668       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
669     }
670     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
671     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
672   }
673   c = (Mat_SeqAIJ*)C->data;
674 
675   /* loop over rows inserting into submatrix */
676   a_new    = c->a;
677   j_new    = c->j;
678   i_new    = c->i;
679 
680   for (i=0; i<nrows; i++) {
681     lensi = ailen[i];
682     for (k=0; k<lensi; k++) {
683       *j_new++ = *aj++;
684       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
685     }
686     i_new[i+1]  = i_new[i] + lensi;
687     c->ilen[i]  = lensi;
688   }
689 
690   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
691   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
692   *B = C;
693   PetscFunctionReturn(0);
694 }
695 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly: compresses out the unused slots between rows in the
   compressed-row storage (same scheme as MatAssemblyEnd_SeqAIJ), assembles
   every block submatrix, and records the diagonal block locations.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax   = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i] ;
      ap = aa + ai[i] ;
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];   /* row starts are now contiguous */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]  = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* assemble each block submatrix; every stored slot must hold a Mat */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  A->info.mallocs     += a->reallocs;
  a->reallocs          = 0;
  A->info.nz_unneeded  = (double)fshift;
  a->rmax              = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
754 
755 #undef __FUNCT__
756 #define __FUNCT__ "MatSetOption_BlockMat"
757 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscBool  flg)
758 {
759   PetscFunctionBegin;
760   if (opt == MAT_SYMMETRIC && flg) {
761     A->ops->sor = MatSOR_BlockMat_Symmetric;
762     A->ops->mult  = MatMult_BlockMat_Symmetric;
763   } else {
764     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
765   }
766   PetscFunctionReturn(0);
767 }
768 
769 
770 static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
771        0,
772        0,
773        MatMult_BlockMat,
774 /* 4*/ MatMultAdd_BlockMat,
775        MatMultTranspose_BlockMat,
776        MatMultTransposeAdd_BlockMat,
777        0,
778        0,
779        0,
780 /*10*/ 0,
781        0,
782        0,
783        MatSOR_BlockMat,
784        0,
785 /*15*/ 0,
786        0,
787        0,
788        0,
789        0,
790 /*20*/ 0,
791        MatAssemblyEnd_BlockMat,
792        MatSetOption_BlockMat,
793        0,
794 /*24*/ 0,
795        0,
796        0,
797        0,
798        0,
799 /*29*/ 0,
800        0,
801        0,
802        0,
803        0,
804 /*34*/ 0,
805        0,
806        0,
807        0,
808        0,
809 /*39*/ 0,
810        0,
811        0,
812        0,
813        0,
814 /*44*/ 0,
815        0,
816        0,
817        0,
818        0,
819 /*49*/ 0,
820        0,
821        0,
822        0,
823        0,
824 /*54*/ 0,
825        0,
826        0,
827        0,
828        0,
829 /*59*/ MatGetSubMatrix_BlockMat,
830        MatDestroy_BlockMat,
831        MatView_BlockMat,
832        0,
833        0,
834 /*64*/ 0,
835        0,
836        0,
837        0,
838        0,
839 /*69*/ 0,
840        0,
841        0,
842        0,
843        0,
844 /*74*/ 0,
845        0,
846        0,
847        0,
848        0,
849 /*79*/ 0,
850        0,
851        0,
852        0,
853        MatLoad_BlockMat,
854 /*84*/ 0,
855        0,
856        0,
857        0,
858        0,
859 /*89*/ 0,
860        0,
861        0,
862        0,
863        0,
864 /*94*/ 0,
865        0,
866        0,
867        0,
868        0,
869 /*99*/ 0,
870        0,
871        0,
872        0,
873        0,
874 /*104*/0,
875        0,
876        0,
877        0,
878        0,
879 /*109*/0,
880        0,
881        0,
882        0,
883        0,
884 /*114*/0,
885        0,
886        0,
887        0,
888        0,
889 /*119*/0,
890        0,
891        0,
892        0
893 };
894 
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzeros per block row (same for all rows)
-  nnz - array containing the number of nonzeros in the various block rows
         (possibly different for each row) or PETSC_NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode  MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatch to the type-specific implementation; a no-op for other matrix types */
  ierr = PetscTryMethod(B,"MatBlockMatSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[]),(B,bs,nz,nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
933 
934 EXTERN_C_BEGIN
935 #undef __FUNCT__
936 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
937 PetscErrorCode  MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
938 {
939   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
940   PetscErrorCode ierr;
941   PetscInt       i;
942 
943   PetscFunctionBegin;
944   ierr = PetscLayoutSetBlockSize(A->rmap,1);CHKERRQ(ierr);
945   ierr = PetscLayoutSetBlockSize(A->cmap,1);CHKERRQ(ierr);
946   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
947   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
948 
949   if (bs < 1) SETERRQ1(((PetscObject)A)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Block size given %D must be great than zero",bs);
950   if (A->rmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap->n);
951   if (A->cmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap->n);
952   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
953   if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
954   if (nnz) {
955     for (i=0; i<A->rmap->n/bs; i++) {
956       if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
957       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
958     }
959   }
960   A->rmap->bs = A->cmap->bs = bs;
961   bmat->mbs  = A->rmap->n/bs;
962 
963   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
964   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
965   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
966 
967   if (!bmat->imax) {
968     ierr = PetscMalloc2(A->rmap->n,PetscInt,&bmat->imax,A->rmap->n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
969     ierr = PetscLogObjectMemory(A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
970   }
971   if (nnz) {
972     nz = 0;
973     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
974       bmat->imax[i] = nnz[i];
975       nz           += nnz[i];
976     }
977   } else {
978     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
979   }
980 
981   /* bmat->ilen will count nonzeros in each row so far. */
982   for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}
983 
984   /* allocate the matrix space */
985   ierr = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
986   ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap->n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
987   ierr = PetscLogObjectMemory(A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
988   bmat->i[0] = 0;
989   for (i=1; i<bmat->mbs+1; i++) {
990     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
991   }
992   bmat->singlemalloc = PETSC_TRUE;
993   bmat->free_a       = PETSC_TRUE;
994   bmat->free_ij      = PETSC_TRUE;
995 
996   bmat->nz                = 0;
997   bmat->maxnz             = nz;
998   A->info.nz_unneeded  = (double)bmat->maxnz;
999 
1000   PetscFunctionReturn(0);
1001 }
1002 EXTERN_C_END
1003 
/*MC
   MATBLOCKMAT - A matrix defined by a set of Mats that together represent a sparse block matrix
                 consisting of (usually) sparse blocks.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/
1013 
1014 EXTERN_C_BEGIN
1015 #undef __FUNCT__
1016 #define __FUNCT__ "MatCreate_BlockMat"
1017 PetscErrorCode  MatCreate_BlockMat(Mat A)
1018 {
1019   Mat_BlockMat   *b;
1020   PetscErrorCode ierr;
1021 
1022   PetscFunctionBegin;
1023   ierr = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
1024   A->data = (void*)b;
1025   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1026 
1027   A->assembled     = PETSC_TRUE;
1028   A->preallocated  = PETSC_FALSE;
1029   ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1030 
1031   ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
1032                                      "MatBlockMatSetPreallocation_BlockMat",
1033                                       MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1034 
1035   PetscFunctionReturn(0);
1036 }
1037 EXTERN_C_END
1038 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix based sparse Mat storage

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use PETSC_NULL otherwise)


   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode  MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1085 
1086 
1087 
1088