xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision fef13f97fc8065f3070ca9972f2f2fb67800151f)
1 #define PETSCMAT_DLL
2 
3 /*
4    This provides a matrix that consists of Mats
5 */
6 
7 #include "private/matimpl.h"              /*I "petscmat.h" I*/
8 #include "../src/mat/impls/baij/seq/baij.h"    /* use the common AIJ data-structure */
9 #include "petscksp.h"
10 
/*
   Data structure for MATBLOCKMAT: a CSR (AIJ-style) matrix whose nonzero
   "entries" are themselves sequential Mat objects of size bs x bs.
*/
typedef struct {
  SEQAIJHEADER(Mat);       /* CSR storage (i, j, a, ilen, imax, ...) with Mat-valued entries */
  SEQBAIJHEADER;           /* block information: bs, mbs, diag[], ... */
  Mat               *diags;   /* factored diagonal blocks; built lazily by the MatSOR routines */

  Vec               left,right,middle,workb;   /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatSOR_BlockMat_Symmetric"
21 PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
22 {
23   Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
24   PetscScalar        *x;
25   const Mat          *v = a->a;
26   const PetscScalar  *b;
27   PetscErrorCode     ierr;
28   PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
29   const PetscInt     *idx;
30   IS                 row,col;
31   MatFactorInfo      info;
32   Vec                left = a->left,right = a->right, middle = a->middle;
33   Mat                *diag;
34 
35   PetscFunctionBegin;
36   its = its*lits;
37   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
38   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
39   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
40   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
41   if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
42     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");
43 
44   if (!a->diags) {
45     ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
46     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
47     for (i=0; i<mbs; i++) {
48       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
49       ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
50       ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
51       ierr = ISDestroy(row);CHKERRQ(ierr);
52       ierr = ISDestroy(col);CHKERRQ(ierr);
53     }
54     ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
55   }
56   diag    = a->diags;
57 
58   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
59   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
60   /* copy right hand side because it must be modified during iteration */
61   ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
62   ierr = VecGetArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);
63 
64   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
65   while (its--) {
66     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){
67 
68       for (i=0; i<mbs; i++) {
69         n    = a->i[i+1] - a->i[i] - 1;
70         idx  = a->j + a->i[i] + 1;
71         v    = a->a + a->i[i] + 1;
72 
73         ierr = VecSet(left,0.0);CHKERRQ(ierr);
74         for (j=0; j<n; j++) {
75           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
76           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
77           ierr = VecResetArray(right);CHKERRQ(ierr);
78         }
79         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
80         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
81         ierr = VecResetArray(right);CHKERRQ(ierr);
82 
83         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
84         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
85 
86         /* now adjust right hand side, see MatSOR_SeqSBAIJ */
87         for (j=0; j<n; j++) {
88           ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
89           ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
90           ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
91           ierr = VecResetArray(middle);CHKERRQ(ierr);
92         }
93         ierr = VecResetArray(right);CHKERRQ(ierr);
94 
95       }
96     }
97     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){
98 
99       for (i=mbs-1; i>=0; i--) {
100         n    = a->i[i+1] - a->i[i] - 1;
101         idx  = a->j + a->i[i] + 1;
102         v    = a->a + a->i[i] + 1;
103 
104         ierr = VecSet(left,0.0);CHKERRQ(ierr);
105         for (j=0; j<n; j++) {
106           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
107           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
108           ierr = VecResetArray(right);CHKERRQ(ierr);
109         }
110         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
111         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
112         ierr = VecResetArray(right);CHKERRQ(ierr);
113 
114         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
115         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
116         ierr = VecResetArray(right);CHKERRQ(ierr);
117 
118       }
119     }
120   }
121   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
122   ierr = VecRestoreArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNCT__
127 #define __FUNCT__ "MatSOR_BlockMat"
128 PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
129 {
130   Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
131   PetscScalar        *x;
132   const Mat          *v = a->a;
133   const PetscScalar  *b;
134   PetscErrorCode     ierr;
135   PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
136   const PetscInt     *idx;
137   IS                 row,col;
138   MatFactorInfo      info;
139   Vec                left = a->left,right = a->right;
140   Mat                *diag;
141 
142   PetscFunctionBegin;
143   its = its*lits;
144   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
145   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
146   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
147   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
148 
149   if (!a->diags) {
150     ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
151     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
152     for (i=0; i<mbs; i++) {
153       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
154       ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
155       ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
156       ierr = ISDestroy(row);CHKERRQ(ierr);
157       ierr = ISDestroy(col);CHKERRQ(ierr);
158     }
159   }
160   diag = a->diags;
161 
162   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
163   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
164   ierr = VecGetArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);
165 
166   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
167   while (its--) {
168     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){
169 
170       for (i=0; i<mbs; i++) {
171         n    = a->i[i+1] - a->i[i];
172         idx  = a->j + a->i[i];
173         v    = a->a + a->i[i];
174 
175         ierr = VecSet(left,0.0);CHKERRQ(ierr);
176         for (j=0; j<n; j++) {
177           if (idx[j] != i) {
178             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
179             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
180             ierr = VecResetArray(right);CHKERRQ(ierr);
181           }
182         }
183         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
184         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
185         ierr = VecResetArray(right);CHKERRQ(ierr);
186 
187         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
188         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
189         ierr = VecResetArray(right);CHKERRQ(ierr);
190       }
191     }
192     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){
193 
194       for (i=mbs-1; i>=0; i--) {
195         n    = a->i[i+1] - a->i[i];
196         idx  = a->j + a->i[i];
197         v    = a->a + a->i[i];
198 
199         ierr = VecSet(left,0.0);CHKERRQ(ierr);
200         for (j=0; j<n; j++) {
201           if (idx[j] != i) {
202             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
203             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
204             ierr = VecResetArray(right);CHKERRQ(ierr);
205           }
206         }
207         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
208         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
209         ierr = VecResetArray(right);CHKERRQ(ierr);
210 
211         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
212         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
213         ierr = VecResetArray(right);CHKERRQ(ierr);
214 
215       }
216     }
217   }
218   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
219   ierr = VecRestoreArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);
220   PetscFunctionReturn(0);
221 }
222 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts scalar values into a BlockMat.  Each scalar (row,col) entry maps to
   block (row/bs, col/bs) and position (row%bs, col%bs) inside that block's
   Mat; missing blocks are created on demand as bs x bs SeqAIJ matrices.
   Follows the standard SeqAIJ MatSetValues structure: binary search within
   each block row, shifting later entries to insert a new block.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscTruth     roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;                    /* block row containing this scalar row */
    if (row < 0) continue;            /* negative rows are ignored by convention */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];             /* block-column indices of block row brow */
    ap   = aa + ai[brow];             /* Mat entries of block row brow */
    rmax = imax[brow];                /* allocated slots in this block row */
    nrow = ailen[brow];               /* slots currently in use */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      /* for symmetric storage only the upper triangular blocks are kept */
      if (A->symmetric && brow > bcol) continue;
      ridx = row % bs; cidx = col % bs;   /* position inside the bs x bs block */
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      /* shrink the binary-search window using the previous column when possible */
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      while (high-low > 7) {          /* binary search down to a short run */
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {      /* linear scan of the remaining slots */
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;             /* block already exists; just add into it */
        }
      }
      if (nonew == 1) goto noinsert1;
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* may reallocate the row storage when full; updates aa,ai,aj,rp,ap,rmax */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;            /* clear the vacated slot so a fresh Mat is created below */
      rp[i]           = bcol;
      a->nz++;
      noinsert1:;
      /* create the bs x bs block on first touch */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
302 
#undef __FUNCT__
#define __FUNCT__ "MatLoad_BlockMat"
/*
   Loads a matrix from a binary viewer as a BlockMat.  The file is first read
   as a SeqAIJ matrix; the block structure is then discovered in two passes
   over its CSR arrays (count nonzero blocks per block row, then preallocate
   each bs x bs submatrix), and finally the scalar values are copied over row
   by row with MatSetValues.

   Options: -matload_block_size sets bs (default 1);
            -matload_symmetric stores only the upper triangular blocks.
*/
PetscErrorCode MatLoad_BlockMat(PetscViewer viewer, const MatType type,Mat *A)
{
  PetscErrorCode    ierr;
  Mat               tmpA;
  PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
  const PetscInt    *cols;
  const PetscScalar *values;
  PetscTruth        flg = PETSC_FALSE,notdone;
  Mat_SeqAIJ        *a;
  Mat_BlockMat      *amat;

  PetscFunctionBegin;
  ierr = MatLoad_SeqAIJ(viewer,MATSEQAIJ,&tmpA);CHKERRQ(ierr);

  ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsTruth("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* Determine number of nonzero blocks for each block row */
  a    = (Mat_SeqAIJ*) tmpA->data;
  mbs  = m/bs;
  /* lens[i]: nonzero blocks in block row i; ii[j]/ilens[j]: cursor and
     remaining length for each of the bs scalar rows of the current block row */
  ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
  ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<mbs; i++) {
    for (j=0; j<bs; j++) {
      ii[j]         = a->j + a->i[i*bs + j];
      ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    /* merge the bs scalar rows, walking block columns left to right */
    currentcol = -1;
    notdone = PETSC_TRUE;
    while (PETSC_TRUE) {
      notdone = PETSC_FALSE;
      nextcol = 1000000000;   /* sentinel larger than any valid block column */
      for (j=0; j<bs; j++) {
        /* advance each row past entries belonging to the current block column */
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
          ii[j]++;
          ilens[j]--;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (!notdone) break;
      /* in symmetric mode only count blocks on or above the diagonal */
      if (!flg || (nextcol >= i)) lens[i]++;
      currentcol = nextcol;
    }
  }

  ierr = MatCreateBlockMat(PETSC_COMM_SELF,m,n,bs,0,lens,A);CHKERRQ(ierr);
  if (flg) {
    ierr = MatSetOption(*A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  }
  amat = (Mat_BlockMat*)(*A)->data;

  /* preallocate the submatrices */
  ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
  for (i=0; i<mbs; i++) { /* loops for block rows */
    for (j=0; j<bs; j++) {
      ii[j]         = a->j + a->i[i*bs + j];
      ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = 1000000000;
    for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
      if (ilens[j] > 0) {
	currentcol = PetscMin(currentcol,ii[j][0]/bs);
      }
    }

    notdone = PETSC_TRUE;
    while (PETSC_TRUE) {  /* loops over blocks in block row */

      notdone = PETSC_FALSE;
      nextcol = 1000000000;
      /* llens[j]: nonzeros of scalar row j that fall inside the current block */
      ierr = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
      for (j=0; j<bs; j++) { /* loop over rows in block */
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
          ii[j]++;
          ilens[j]--;
          llens[j]++;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
      if (!flg || currentcol >= i) {
        amat->j[cnt] = currentcol;
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
      }

      if (!notdone) break;
      currentcol = nextcol;
    }
    amat->ilen[i] = lens[i];
  }
  CHKMEMQ;

  ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
  ierr = PetscFree(llens);CHKERRQ(ierr);

  /* copy over the matrix, one row at a time */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
    ierr = MatSetValues(*A,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
422 
423 #undef __FUNCT__
424 #define __FUNCT__ "MatView_BlockMat"
425 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
426 {
427   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
428   PetscErrorCode    ierr;
429   const char        *name;
430   PetscViewerFormat format;
431 
432   PetscFunctionBegin;
433   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
434   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
435   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
436     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
437     if (A->symmetric) {
438       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
439     }
440   }
441   PetscFunctionReturn(0);
442 }
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "MatDestroy_BlockMat"
446 PetscErrorCode MatDestroy_BlockMat(Mat mat)
447 {
448   PetscErrorCode ierr;
449   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
450   PetscInt       i;
451 
452   PetscFunctionBegin;
453   if (bmat->right) {
454     ierr = VecDestroy(bmat->right);CHKERRQ(ierr);
455   }
456   if (bmat->left) {
457     ierr = VecDestroy(bmat->left);CHKERRQ(ierr);
458   }
459   if (bmat->middle) {
460     ierr = VecDestroy(bmat->middle);CHKERRQ(ierr);
461   }
462   if (bmat->workb) {
463     ierr = VecDestroy(bmat->workb);CHKERRQ(ierr);
464   }
465   if (bmat->diags) {
466     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
467       if (bmat->diags[i]) {ierr = MatDestroy(bmat->diags[i]);CHKERRQ(ierr);}
468     }
469   }
470   if (bmat->a) {
471     for (i=0; i<bmat->nz; i++) {
472       if (bmat->a[i]) {ierr = MatDestroy(bmat->a[i]);CHKERRQ(ierr);}
473     }
474   }
475   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
476   ierr = PetscFree(bmat);CHKERRQ(ierr);
477   PetscFunctionReturn(0);
478 }
479 
480 #undef __FUNCT__
481 #define __FUNCT__ "MatMult_BlockMat"
482 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
483 {
484   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
485   PetscErrorCode ierr;
486   PetscScalar    *xx,*yy;
487   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
488   Mat            *aa;
489 
490   PetscFunctionBegin;
491   CHKMEMQ;
492   /*
493      Standard CSR multiply except each entry is a Mat
494   */
495   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
496 
497   ierr = VecSet(y,0.0);CHKERRQ(ierr);
498   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
499   aj  = bmat->j;
500   aa  = bmat->a;
501   ii  = bmat->i;
502   for (i=0; i<m; i++) {
503     jrow = ii[i];
504     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
505     n    = ii[i+1] - jrow;
506     for (j=0; j<n; j++) {
507       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
508       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
509       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
510       jrow++;
511     }
512     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
513   }
514   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
515   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
516   CHKMEMQ;
517   PetscFunctionReturn(0);
518 }
519 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A*x when only the upper triangular blocks are stored: each stored
   off-diagonal block A_{ij} contributes A_{ij} x_j to y_i (upper part) and
   A_{ij}^T x_i to y_j (implicit lower part).
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj  = bmat->j;
  aa  = bmat->a;
  ii  = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);    /* y_i accumulator */
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);  /* x_i, read by the transpose updates */
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      /* diagonal block contributes only once (no transpose term) */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
573 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/* NOTE(review): unimplemented stub -- returns success without computing
   z = A*x + y.  Consider SETERRQ(...,PETSC_ERR_SUP,...) instead of silently
   doing nothing. */
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
581 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/* NOTE(review): unimplemented stub -- returns success without computing
   y = A^T*x.  Consider SETERRQ(...,PETSC_ERR_SUP,...) instead of silently
   doing nothing. */
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
589 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/* NOTE(review): unimplemented stub -- returns success without computing
   z = A^T*x + y.  Consider SETERRQ(...,PETSC_ERR_SUP,...) instead of silently
   doing nothing. */
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
597 
598 /*
599      Adds diagonal pointers to sparse matrix structure.
600 */
601 #undef __FUNCT__
602 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
603 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
604 {
605   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
606   PetscErrorCode ierr;
607   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
608 
609   PetscFunctionBegin;
610   if (!a->diag) {
611     ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
612   }
613   for (i=0; i<mbs; i++) {
614     a->diag[i] = a->i[i+1];
615     for (j=a->i[i]; j<a->i[i+1]; j++) {
616       if (a->j[j] == i) {
617         a->diag[i] = j;
618         break;
619       }
620     }
621   }
622   PetscFunctionReturn(0);
623 }
624 
625 #undef __FUNCT__
626 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
627 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
628 {
629   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
630   Mat_SeqAIJ     *c;
631   PetscErrorCode ierr;
632   PetscInt       i,k,first,step,lensi,nrows,ncols;
633   PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
634   PetscScalar    *a_new;
635   Mat            C,*aa = a->a;
636   PetscTruth     stride,equal;
637 
638   PetscFunctionBegin;
639   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
640   if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for idential column and row indices");
641   ierr = ISStride(iscol,&stride);CHKERRQ(ierr);
642   if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
643   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
644   if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");
645 
646   ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
647   ncols = nrows;
648 
649   /* create submatrix */
650   if (scall == MAT_REUSE_MATRIX) {
651     PetscInt n_cols,n_rows;
652     C = *B;
653     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
654     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
655     ierr = MatZeroEntries(C);CHKERRQ(ierr);
656   } else {
657     ierr = MatCreate(((PetscObject)A)->comm,&C);CHKERRQ(ierr);
658     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
659     if (A->symmetric) {
660       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
661     } else {
662       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
663     }
664     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
665     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
666   }
667   c = (Mat_SeqAIJ*)C->data;
668 
669   /* loop over rows inserting into submatrix */
670   a_new    = c->a;
671   j_new    = c->j;
672   i_new    = c->i;
673 
674   for (i=0; i<nrows; i++) {
675     lensi = ailen[i];
676     for (k=0; k<lensi; k++) {
677       *j_new++ = *aj++;
678       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
679     }
680     i_new[i+1]  = i_new[i] + lensi;
681     c->ilen[i]  = lensi;
682   }
683 
684   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
685   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
686   *B = C;
687   PetscFunctionReturn(0);
688 }
689 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Finalizes assembly: compresses out the unused slots left by preallocation
   (standard AIJ squeeze), then assembles every block submatrix and records
   the diagonal block positions.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax   = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i] ;
      ap = aa + ai[i] ;
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];   /* row starts become contiguous */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]  = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* assemble each block submatrix now that the storage is compacted */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  A->info.mallocs     += a->reallocs;
  a->reallocs          = 0;
  A->info.nz_unneeded  = (double)fshift;
  a->rmax              = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
748 
749 #undef __FUNCT__
750 #define __FUNCT__ "MatSetOption_BlockMat"
751 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscTruth flg)
752 {
753   PetscFunctionBegin;
754   if (opt == MAT_SYMMETRIC && flg) {
755     A->ops->sor = MatSOR_BlockMat_Symmetric;
756     A->ops->mult  = MatMult_BlockMat_Symmetric;
757   } else {
758     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
759   }
760   PetscFunctionReturn(0);
761 }
762 

/* Function table for MATBLOCKMAT; the numbered comments give the slot index
   in struct _MatOps.  Unimplemented operations are 0. */
static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
       0,
       0,
       MatMult_BlockMat,
/* 4*/ MatMultAdd_BlockMat,
       MatMultTranspose_BlockMat,
       MatMultTransposeAdd_BlockMat,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatSOR_BlockMat,
       0,
/*15*/ 0,
       0,
       0,
       0,
       0,
/*20*/ 0,
       MatAssemblyEnd_BlockMat,
       MatSetOption_BlockMat,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ MatGetSubMatrix_BlockMat,
       MatDestroy_BlockMat,
       MatView_BlockMat,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_BlockMat,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "MatBlockMatSetPreallocation"
865 /*@C
866    MatBlockMatSetPreallocation - For good matrix assembly performance
867    the user should preallocate the matrix storage by setting the parameter nz
868    (or the array nnz).  By setting these parameters accurately, performance
869    during matrix assembly can be increased by more than a factor of 50.
870 
871    Collective on MPI_Comm
872 
873    Input Parameters:
874 +  B - The matrix
875 .  bs - size of each block in matrix
876 .  nz - number of nonzeros per block row (same for all rows)
877 -  nnz - array containing the number of nonzeros in the various block rows
878          (possibly different for each row) or PETSC_NULL
879 
880    Notes:
881      If nnz is given then nz is ignored
882 
883    Specify the preallocated storage with either nz or nnz (not both).
884    Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
885    allocation.  For large problems you MUST preallocate memory or you
886    will get TERRIBLE performance, see the users' manual chapter on matrices.
887 
888    Level: intermediate
889 
890 .seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()
891 
892 @*/
893 PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
894 {
895   PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[]);
896 
897   PetscFunctionBegin;
898   ierr = PetscObjectQueryFunction((PetscObject)B,"MatBlockMatSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
899   if (f) {
900     ierr = (*f)(B,bs,nz,nnz);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
905 EXTERN_C_BEGIN
906 #undef __FUNCT__
907 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
908 PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
909 {
910   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
911   PetscErrorCode ierr;
912   PetscInt       i;
913 
914   PetscFunctionBegin;
915   if (bs < 1) SETERRQ1(((PetscObject)A)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Block size given %D must be great than zero",bs);
916   if (A->rmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap->n);
917   if (A->cmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap->n);
918   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
919   if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
920   if (nnz) {
921     for (i=0; i<A->rmap->n/bs; i++) {
922       if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
923       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
924     }
925   }
926   A->rmap->bs = A->cmap->bs = bs;
927   bmat->mbs  = A->rmap->n/bs;
928 
929   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
930   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
931   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
932 
933   if (!bmat->imax) {
934     ierr = PetscMalloc2(A->rmap->n,PetscInt,&bmat->imax,A->rmap->n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
935     ierr = PetscLogObjectMemory(A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
936   }
937   if (nnz) {
938     nz = 0;
939     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
940       bmat->imax[i] = nnz[i];
941       nz           += nnz[i];
942     }
943   } else {
944     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
945   }
946 
947   /* bmat->ilen will count nonzeros in each row so far. */
948   for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}
949 
950   /* allocate the matrix space */
951   ierr = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
952   ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap->n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
953   ierr = PetscLogObjectMemory(A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
954   bmat->i[0] = 0;
955   for (i=1; i<bmat->mbs+1; i++) {
956     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
957   }
958   bmat->singlemalloc = PETSC_TRUE;
959   bmat->free_a       = PETSC_TRUE;
960   bmat->free_ij      = PETSC_TRUE;
961 
962   bmat->nz                = 0;
963   bmat->maxnz             = nz;
964   A->info.nz_unneeded  = (double)bmat->maxnz;
965 
966   PetscFunctionReturn(0);
967 }
968 EXTERN_C_END
969 
970 /*MC
971    MATBLOCKMAT - A matrix that is defined by a set of Mat's that represents a sparse block matrix
972                  consisting of (usually) sparse blocks.
973 
974   Level: advanced
975 
976 .seealso: MatCreateBlockMat()
977 
978 M*/
979 
980 EXTERN_C_BEGIN
981 #undef __FUNCT__
982 #define __FUNCT__ "MatCreate_BlockMat"
983 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_BlockMat(Mat A)
984 {
985   Mat_BlockMat   *b;
986   PetscErrorCode ierr;
987 
988   PetscFunctionBegin;
989   ierr = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
990   A->data = (void*)b;
991   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
992 
993   ierr = PetscLayoutSetBlockSize(A->rmap,1);CHKERRQ(ierr);
994   ierr = PetscLayoutSetBlockSize(A->cmap,1);CHKERRQ(ierr);
995   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
996   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
997 
998   A->assembled     = PETSC_TRUE;
999   A->preallocated  = PETSC_FALSE;
1000   ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1001 
1002   ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
1003                                      "MatBlockMatSetPreallocation_BlockMat",
1004                                       MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1005 
1006   PetscFunctionReturn(0);
1007 }
1008 EXTERN_C_END
1009 
1010 #undef __FUNCT__
1011 #define __FUNCT__ "MatCreateBlockMat"
1012 /*@C
   MatCreateBlockMat - Creates a new matrix based on sparse Mat storage
1014 
1015   Collective on MPI_Comm
1016 
1017    Input Parameters:
1018 +  comm - MPI communicator
1019 .  m - number of rows
1020 .  n  - number of columns
1021 .  bs - size of each submatrix
1022 .  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use PETSC_NULL otherwise)
1024 
1025 
1026    Output Parameter:
1027 .  A - the matrix
1028 
1029    Level: intermediate
1030 
1031    PETSc requires that matrices and vectors being used for certain
1032    operations are partitioned accordingly.  For example, when
1033    creating a bmat matrix, A, that supports parallel matrix-vector
1034    products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.
1040 
1041 .keywords: matrix, bmat, create
1042 
1043 .seealso: MATBLOCKMAT
1044 @*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create an empty matrix, fix its local sizes, switch it to the BlockMat
     format, then preallocate storage for the nonzero blocks; the order of
     these calls is significant (type must be set before preallocation). */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1056 
1057 
1058 
1059