xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 73e31fe287e90bf7d8f915b02b2c934a4e6aaee4)
1 
2 /*
3    This provides a matrix that consists of Mats
4 */
5 
6 #include <petsc-private/matimpl.h>              /*I "petscmat.h" I*/
7 #include <../src/mat/impls/baij/seq/baij.h>    /* use the common AIJ data-structure */
8 
typedef struct {
  SEQAIJHEADER(Mat);          /* CSR-style row storage where each stored "entry" is a Mat (see aij/baij headers) */
  SEQBAIJHEADER;              /* block-size bookkeeping shared with SeqBAIJ */
  Mat *diags;                 /* cached factored diagonal blocks; built lazily by the MatSOR_BlockMat*() routines */

  Vec left,right,middle,workb;                 /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
16 
17 extern PetscErrorCode  MatBlockMatSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatSOR_BlockMat_Symmetric"
21 PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
22 {
23   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
24   PetscScalar       *x;
25   const Mat         *v;
26   const PetscScalar *b;
27   PetscErrorCode    ierr;
28   PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
29   const PetscInt    *idx;
30   IS                row,col;
31   MatFactorInfo     info;
32   Vec               left = a->left,right = a->right, middle = a->middle;
33   Mat               *diag;
34 
35   PetscFunctionBegin;
36   its = its*lits;
37   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
38   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
39   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
40   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
41   if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
42     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");
43 
44   if (!a->diags) {
45     ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
46     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
47     for (i=0; i<mbs; i++) {
48       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
49       ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
50       ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
51       ierr = ISDestroy(&row);CHKERRQ(ierr);
52       ierr = ISDestroy(&col);CHKERRQ(ierr);
53     }
54     ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
55   }
56   diag = a->diags;
57 
58   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
59   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
60   /* copy right hand side because it must be modified during iteration */
61   ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
62   ierr = VecGetArrayRead(a->workb,&b);CHKERRQ(ierr);
63 
64   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
65   while (its--) {
66     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {
67 
68       for (i=0; i<mbs; i++) {
69         n   = a->i[i+1] - a->i[i] - 1;
70         idx = a->j + a->i[i] + 1;
71         v   = a->a + a->i[i] + 1;
72 
73         ierr = VecSet(left,0.0);CHKERRQ(ierr);
74         for (j=0; j<n; j++) {
75           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
76           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
77           ierr = VecResetArray(right);CHKERRQ(ierr);
78         }
79         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
80         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
81         ierr = VecResetArray(right);CHKERRQ(ierr);
82 
83         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
84         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
85 
86         /* now adjust right hand side, see MatSOR_SeqSBAIJ */
87         for (j=0; j<n; j++) {
88           ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
89           ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
90           ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
91           ierr = VecResetArray(middle);CHKERRQ(ierr);
92         }
93         ierr = VecResetArray(right);CHKERRQ(ierr);
94 
95       }
96     }
97     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {
98 
99       for (i=mbs-1; i>=0; i--) {
100         n   = a->i[i+1] - a->i[i] - 1;
101         idx = a->j + a->i[i] + 1;
102         v   = a->a + a->i[i] + 1;
103 
104         ierr = VecSet(left,0.0);CHKERRQ(ierr);
105         for (j=0; j<n; j++) {
106           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
107           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
108           ierr = VecResetArray(right);CHKERRQ(ierr);
109         }
110         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
111         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
112         ierr = VecResetArray(right);CHKERRQ(ierr);
113 
114         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
115         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
116         ierr = VecResetArray(right);CHKERRQ(ierr);
117 
118       }
119     }
120   }
121   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
122   ierr = VecRestoreArrayRead(a->workb,&b);CHKERRQ(ierr);
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNCT__
127 #define __FUNCT__ "MatSOR_BlockMat"
128 PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
129 {
130   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
131   PetscScalar       *x;
132   const Mat         *v;
133   const PetscScalar *b;
134   PetscErrorCode    ierr;
135   PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
136   const PetscInt    *idx;
137   IS                row,col;
138   MatFactorInfo     info;
139   Vec               left = a->left,right = a->right;
140   Mat               *diag;
141 
142   PetscFunctionBegin;
143   its = its*lits;
144   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
145   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
146   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
147   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
148 
149   if (!a->diags) {
150     ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
151     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
152     for (i=0; i<mbs; i++) {
153       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
154       ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
155       ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
156       ierr = ISDestroy(&row);CHKERRQ(ierr);
157       ierr = ISDestroy(&col);CHKERRQ(ierr);
158     }
159   }
160   diag = a->diags;
161 
162   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
163   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
164   ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
165 
166   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
167   while (its--) {
168     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {
169 
170       for (i=0; i<mbs; i++) {
171         n   = a->i[i+1] - a->i[i];
172         idx = a->j + a->i[i];
173         v   = a->a + a->i[i];
174 
175         ierr = VecSet(left,0.0);CHKERRQ(ierr);
176         for (j=0; j<n; j++) {
177           if (idx[j] != i) {
178             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
179             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
180             ierr = VecResetArray(right);CHKERRQ(ierr);
181           }
182         }
183         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
184         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
185         ierr = VecResetArray(right);CHKERRQ(ierr);
186 
187         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
188         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
189         ierr = VecResetArray(right);CHKERRQ(ierr);
190       }
191     }
192     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {
193 
194       for (i=mbs-1; i>=0; i--) {
195         n   = a->i[i+1] - a->i[i];
196         idx = a->j + a->i[i];
197         v   = a->a + a->i[i];
198 
199         ierr = VecSet(left,0.0);CHKERRQ(ierr);
200         for (j=0; j<n; j++) {
201           if (idx[j] != i) {
202             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
203             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
204             ierr = VecResetArray(right);CHKERRQ(ierr);
205           }
206         }
207         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
208         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
209         ierr = VecResetArray(right);CHKERRQ(ierr);
210 
211         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
212         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
213         ierr = VecResetArray(right);CHKERRQ(ierr);
214 
215       }
216     }
217   }
218   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
219   ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
220   PetscFunctionReturn(0);
221 }
222 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts scalar values into the blocked structure: the scalar at global
   (row,col) lands at local position (row%bs, col%bs) inside the block at
   block coordinates (row/bs, col/bs).  The block itself is a SeqAIJ Mat,
   created on demand the first time its block slot is touched.  The block-row
   search/insert logic mirrors MatSetValues_SeqAIJ but operates on block
   column indices.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj  =a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscBool      roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;      /* negative rows are ignored (standard PETSc convention) */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    /* view of block row brow: column indices, block pointers, capacity and current length */
    rp   = aj + ai[brow];
    ap   = aa + ai[brow];
    rmax = imax[brow];
    nrow = ailen[brow];
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      /* symmetric storage keeps only the upper triangular blocks */
      if (A->symmetric && brow > bcol) continue;
      ridx = row % bs; cidx = col % bs;      /* position within the bs x bs block */
      if (roworiented) value = v[l + k*n];
      else value = v[k + l*m];

      /* reuse the previous search window when columns arrive in increasing order */
      if (col <= lastcol) low = 0;
      else high = nrow;
      lastcol = col;
      /* binary search narrows the window to <= 7 entries, then linear scan */
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) goto noinsert1;   /* block already present */
      }
      if (nonew == 1) goto noinsert1;        /* new nonzeros disallowed: silently drop */
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* grow the row storage if needed (may move aa/aj and update rp/ap) */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;      /* clear the vacated slot so the block Mat is created below */
      rp[i] = bcol;
      a->nz++;
noinsert1:;
      /* create the bs x bs block matrix on first touch */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low  = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
299 
#undef __FUNCT__
#define __FUNCT__ "MatLoad_BlockMat"
/*
   Loads a matrix from a binary viewer by first reading it as SeqAIJ, then
   converting it into the blocked format.  The block size comes from
   -matload_block_size (default 1); -matload_symmetric stores only the upper
   triangular blocks and marks the matrix symmetric.

   Three passes over the SeqAIJ data:
     1. count the nonzero blocks in each block row (lens[]),
     2. preallocate each block submatrix with exact per-row counts (llens[]),
     3. copy the scalar values over one row at a time with MatSetValues().
*/
PetscErrorCode MatLoad_BlockMat(Mat newmat, PetscViewer viewer)
{
  PetscErrorCode    ierr;
  Mat               tmpA;
  PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
  const PetscInt    *cols;
  const PetscScalar *values;
  PetscBool         flg = PETSC_FALSE,notdone;
  Mat_SeqAIJ        *a;
  Mat_BlockMat      *amat;

  PetscFunctionBegin;
  ierr = MatCreate(PETSC_COMM_SELF,&tmpA);CHKERRQ(ierr);
  ierr = MatSetType(tmpA,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatLoad_SeqAIJ(tmpA,viewer);CHKERRQ(ierr);

  ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* Determine number of nonzero blocks for each block row */
  a    = (Mat_SeqAIJ*) tmpA->data;
  mbs  = m/bs;
  /* ii[j]/ilens[j] are a cursor into the unread part of scalar row i*bs+j */
  ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
  ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<mbs; i++) {
    for (j=0; j<bs; j++) {
      ii[j]    = a->j + a->i[i*bs + j];
      ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = -1;
    notdone    = PETSC_TRUE;
    while (PETSC_TRUE) {   /* merge the bs scalar rows, visiting each distinct block column once */
      notdone = PETSC_FALSE;
      nextcol = 1000000000; /* sentinel larger than any block column index */
      for (j=0; j<bs; j++) {
        /* advance each row cursor past entries belonging to blocks already counted */
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
          ii[j]++;
          ilens[j]--;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (!notdone) break;
      /* for symmetric storage only count blocks on or above the diagonal */
      if (!flg || (nextcol >= i)) lens[i]++;
      currentcol = nextcol;
    }
  }

  if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) {
    ierr = MatSetSizes(newmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  }
  ierr = MatBlockMatSetPreallocation(newmat,bs,0,lens);CHKERRQ(ierr);
  if (flg) {
    ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  }
  amat = (Mat_BlockMat*)(newmat)->data;

  /* preallocate the submatrices */
  ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
  for (i=0; i<mbs; i++) { /* loops for block rows */
    for (j=0; j<bs; j++) {
      ii[j]    = a->j + a->i[i*bs + j];
      ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
    }

    currentcol = 1000000000;
    for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
      if (ilens[j] > 0) {
        currentcol = PetscMin(currentcol,ii[j][0]/bs);
      }
    }

    notdone = PETSC_TRUE;
    while (PETSC_TRUE) {  /* loops over blocks in block row */

      notdone = PETSC_FALSE;
      nextcol = 1000000000;
      ierr    = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
      for (j=0; j<bs; j++) { /* loop over rows in block */
        /* llens[j] counts this block's nonzeros in scalar row j -- used to preallocate it */
        while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
          ii[j]++;
          ilens[j]--;
          llens[j]++;
        }
        if (ilens[j] > 0) {
          notdone = PETSC_TRUE;
          nextcol = PetscMin(nextcol,ii[j][0]/bs);
        }
      }
      if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
      if (!flg || currentcol >= i) {
        amat->j[cnt] = currentcol;
        ierr         = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
      }

      if (!notdone) break;
      currentcol = nextcol;
    }
    amat->ilen[i] = lens[i];
  }
  CHKMEMQ;

  ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
  ierr = PetscFree(llens);CHKERRQ(ierr);

  /* copy over the matrix, one row at a time */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
    ierr = MatSetValues(newmat,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
  }
  /* NOTE(review): tmpA appears to never be destroyed here -- verify whether this leaks */
  ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
424 
425 #undef __FUNCT__
426 #define __FUNCT__ "MatView_BlockMat"
427 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
428 {
429   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
430   PetscErrorCode    ierr;
431   const char        *name;
432   PetscViewerFormat format;
433 
434   PetscFunctionBegin;
435   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
436   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
437   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
438     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
439     if (A->symmetric) {
440       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
441     }
442   }
443   PetscFunctionReturn(0);
444 }
445 
446 #undef __FUNCT__
447 #define __FUNCT__ "MatDestroy_BlockMat"
448 PetscErrorCode MatDestroy_BlockMat(Mat mat)
449 {
450   PetscErrorCode ierr;
451   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
452   PetscInt       i;
453 
454   PetscFunctionBegin;
455   ierr = VecDestroy(&bmat->right);CHKERRQ(ierr);
456   ierr = VecDestroy(&bmat->left);CHKERRQ(ierr);
457   ierr = VecDestroy(&bmat->middle);CHKERRQ(ierr);
458   ierr = VecDestroy(&bmat->workb);CHKERRQ(ierr);
459   if (bmat->diags) {
460     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
461       ierr = MatDestroy(&bmat->diags[i]);CHKERRQ(ierr);
462     }
463   }
464   if (bmat->a) {
465     for (i=0; i<bmat->nz; i++) {
466       ierr = MatDestroy(&bmat->a[i]);CHKERRQ(ierr);
467     }
468   }
469   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
470   ierr = PetscFree(mat->data);CHKERRQ(ierr);
471   PetscFunctionReturn(0);
472 }
473 
474 #undef __FUNCT__
475 #define __FUNCT__ "MatMult_BlockMat"
476 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
477 {
478   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
479   PetscErrorCode ierr;
480   PetscScalar    *xx,*yy;
481   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
482   Mat            *aa;
483 
484   PetscFunctionBegin;
485   CHKMEMQ;
486   /*
487      Standard CSR multiply except each entry is a Mat
488   */
489   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
490 
491   ierr = VecSet(y,0.0);CHKERRQ(ierr);
492   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
493   aj   = bmat->j;
494   aa   = bmat->a;
495   ii   = bmat->i;
496   for (i=0; i<m; i++) {
497     jrow = ii[i];
498     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
499     n    = ii[i+1] - jrow;
500     for (j=0; j<n; j++) {
501       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
502       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
503       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
504       jrow++;
505     }
506     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
507   }
508   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
509   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
510   CHKMEMQ;
511   PetscFunctionReturn(0);
512 }
513 
514 #undef __FUNCT__
515 #define __FUNCT__ "MatMult_BlockMat_Symmetric"
516 PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
517 {
518   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
519   PetscErrorCode ierr;
520   PetscScalar    *xx,*yy;
521   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
522   Mat            *aa;
523 
524   PetscFunctionBegin;
525   CHKMEMQ;
526   /*
527      Standard CSR multiply except each entry is a Mat
528   */
529   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
530 
531   ierr = VecSet(y,0.0);CHKERRQ(ierr);
532   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
533   aj   = bmat->j;
534   aa   = bmat->a;
535   ii   = bmat->i;
536   for (i=0; i<m; i++) {
537     jrow = ii[i];
538     n    = ii[i+1] - jrow;
539     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
540     ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);
541     /* if we ALWAYS required a diagonal entry then could remove this if test */
542     if (aj[jrow] == i) {
543       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
544       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
545       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
546       jrow++;
547       n--;
548     }
549     for (j=0; j<n; j++) {
550       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
551       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
552       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
553 
554       ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
555       ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
556       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
557       jrow++;
558     }
559     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
560     ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
561   }
562   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
563   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
564   CHKMEMQ;
565   PetscFunctionReturn(0);
566 }
567 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/* Not implemented: returns success without computing z = y + A*x.
   NOTE(review): a stub that silently reports success can hide wrong answers;
   consider SETERRQ(...,PETSC_ERR_SUP,...) instead -- confirm no caller relies
   on the no-op behavior. */
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
575 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/* Not implemented: returns success without computing y = A^T * x.
   NOTE(review): see MatMultAdd_BlockMat -- silently succeeding stubs are
   dangerous; consider raising PETSC_ERR_SUP. */
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
583 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/* Not implemented: returns success without computing z = y + A^T * x.
   NOTE(review): see MatMultAdd_BlockMat -- silently succeeding stubs are
   dangerous; consider raising PETSC_ERR_SUP. */
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
591 
592 /*
593      Adds diagonal pointers to sparse matrix structure.
594 */
595 #undef __FUNCT__
596 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
597 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
598 {
599   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
600   PetscErrorCode ierr;
601   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
602 
603   PetscFunctionBegin;
604   if (!a->diag) {
605     ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
606   }
607   for (i=0; i<mbs; i++) {
608     a->diag[i] = a->i[i+1];
609     for (j=a->i[i]; j<a->i[i+1]; j++) {
610       if (a->j[j] == i) {
611         a->diag[i] = j;
612         break;
613       }
614     }
615   }
616   PetscFunctionReturn(0);
617 }
618 
619 #undef __FUNCT__
620 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
621 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
622 {
623   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
624   Mat_SeqAIJ     *c;
625   PetscErrorCode ierr;
626   PetscInt       i,k,first,step,lensi,nrows,ncols;
627   PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
628   PetscScalar    *a_new;
629   Mat            C,*aa = a->a;
630   PetscBool      stride,equal;
631 
632   PetscFunctionBegin;
633   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
634   if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for idential column and row indices");
635   ierr = PetscObjectTypeCompare((PetscObject)iscol,ISSTRIDE,&stride);CHKERRQ(ierr);
636   if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
637   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
638   if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");
639 
640   ierr  = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
641   ncols = nrows;
642 
643   /* create submatrix */
644   if (scall == MAT_REUSE_MATRIX) {
645     PetscInt n_cols,n_rows;
646     C    = *B;
647     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
648     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
649     ierr = MatZeroEntries(C);CHKERRQ(ierr);
650   } else {
651     ierr = MatCreate(((PetscObject)A)->comm,&C);CHKERRQ(ierr);
652     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
653     if (A->symmetric) {
654       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
655     } else {
656       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
657     }
658     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
659     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
660   }
661   c = (Mat_SeqAIJ*)C->data;
662 
663   /* loop over rows inserting into submatrix */
664   a_new = c->a;
665   j_new = c->j;
666   i_new = c->i;
667 
668   for (i=0; i<nrows; i++) {
669     lensi = ailen[i];
670     for (k=0; k<lensi; k++) {
671       *j_new++ = *aj++;
672       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
673     }
674     i_new[i+1] = i_new[i] + lensi;
675     c->ilen[i] = lensi;
676   }
677 
678   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
679   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
680   *B   = C;
681   PetscFunctionReturn(0);
682 }
683 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly: compresses the block rows in place, squeezing out the
   unused preallocated slots between rows (same scheme as
   MatAssemblyEnd_SeqAIJ), then assembles every block submatrix and records
   the diagonal block locations.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m      = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa    = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax    = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];      /* row starts are now contiguous */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]   = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* assemble each block submatrix */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);

  A->info.mallocs    += a->reallocs;
  a->reallocs         = 0;
  A->info.nz_unneeded = (double)fshift;
  a->rmax             = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr            = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
743 
744 #undef __FUNCT__
745 #define __FUNCT__ "MatSetOption_BlockMat"
746 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscBool flg)
747 {
748   PetscFunctionBegin;
749   if (opt == MAT_SYMMETRIC && flg) {
750     A->ops->sor  = MatSOR_BlockMat_Symmetric;
751     A->ops->mult = MatMult_BlockMat_Symmetric;
752   } else {
753     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
754   }
755   PetscFunctionReturn(0);
756 }
757 
758 
/*
   Function table for MATBLOCKMAT.  Entries are positional: the slot-number
   comments correspond to the member order of struct _MatOps; 0 marks an
   operation this type does not implement.
*/
static struct _MatOps MatOps_Values = {
        MatSetValues_BlockMat,
        0,
        0,
        MatMult_BlockMat,
/*  4*/ MatMultAdd_BlockMat,
        MatMultTranspose_BlockMat,
        MatMultTransposeAdd_BlockMat,
        0,
        0,
        0,
/* 10*/ 0,
        0,
        0,
        MatSOR_BlockMat,
        0,
/* 15*/ 0,
        0,
        0,
        0,
        0,
/* 20*/ 0,
        MatAssemblyEnd_BlockMat,
        MatSetOption_BlockMat,
        0,
/* 24*/ 0,
        0,
        0,
        0,
        0,
/* 29*/ 0,
        0,
        0,
        0,
        0,
/* 34*/ 0,
        0,
        0,
        0,
        0,
/* 39*/ 0,
        0,
        0,
        0,
        0,
/* 44*/ 0,
        0,
        0,
        0,
        0,
/* 49*/ 0,
        0,
        0,
        0,
        0,
/* 54*/ 0,
        0,
        0,
        0,
        0,
/* 59*/ MatGetSubMatrix_BlockMat,
        MatDestroy_BlockMat,
        MatView_BlockMat,
        0,
        0,
/* 64*/ 0,
        0,
        0,
        0,
        0,
/* 69*/ 0,
        0,
        0,
        0,
        0,
/* 74*/ 0,
        0,
        0,
        0,
        0,
/* 79*/ 0,
        0,
        0,
        0,
        MatLoad_BlockMat,
/* 84*/ 0,
        0,
        0,
        0,
        0,
/* 89*/ 0,
        0,
        0,
        0,
        0,
/* 94*/ 0,
        0,
        0,
        0,
        0,
/* 99*/ 0,
        0,
        0,
        0,
        0,
/*104*/ 0,
        0,
        0,
        0,
        0,
/*109*/ 0,
        0,
        0,
        0,
        0,
/*114*/ 0,
        0,
        0,
        0,
        0,
/*119*/ 0,
        0,
        0,
        0
};
884 
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzeros per block row (same for all rows)
-  nnz - array containing the number of nonzeros in the various block rows
         (possibly different for each row) or PETSC_NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode  MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatches to MatBlockMatSetPreallocation_BlockMat() when B is a BlockMat;
     silently does nothing for other matrix types (PetscTryMethod semantics) */
  ierr = PetscTryMethod(B,"MatBlockMatSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[]),(B,bs,nz,nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
923 
924 EXTERN_C_BEGIN
925 #undef __FUNCT__
926 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
927 PetscErrorCode  MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
928 {
929   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
930   PetscErrorCode ierr;
931   PetscInt       i;
932 
933   PetscFunctionBegin;
934   ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr);
935   ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr);
936   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
937   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
938   ierr = PetscLayoutGetBlockSize(A->rmap,&bs);CHKERRQ(ierr);
939 
940   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
941   if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
942   if (nnz) {
943     for (i=0; i<A->rmap->n/bs; i++) {
944       if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
945       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
946     }
947   }
948   bmat->mbs = A->rmap->n/bs;
949 
950   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
951   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
952   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
953 
954   if (!bmat->imax) {
955     ierr = PetscMalloc2(A->rmap->n,PetscInt,&bmat->imax,A->rmap->n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
956     ierr = PetscLogObjectMemory(A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
957   }
958   if (nnz) {
959     nz = 0;
960     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
961       bmat->imax[i] = nnz[i];
962       nz           += nnz[i];
963     }
964   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
965 
966   /* bmat->ilen will count nonzeros in each row so far. */
967   for (i=0; i<bmat->mbs; i++) bmat->ilen[i] = 0;
968 
969   /* allocate the matrix space */
970   ierr       = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
971   ierr       = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap->n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
972   ierr       = PetscLogObjectMemory(A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
973   bmat->i[0] = 0;
974   for (i=1; i<bmat->mbs+1; i++) {
975     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
976   }
977   bmat->singlemalloc = PETSC_TRUE;
978   bmat->free_a       = PETSC_TRUE;
979   bmat->free_ij      = PETSC_TRUE;
980 
981   bmat->nz            = 0;
982   bmat->maxnz         = nz;
983   A->info.nz_unneeded = (double)bmat->maxnz;
984   ierr                = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
985   PetscFunctionReturn(0);
986 }
987 EXTERN_C_END
988 
/*MC
   MATBLOCKMAT - A matrix defined by a collection of Mat objects, each one
                 representing a (usually sparse) block of a sparse block matrix.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/
998 
999 EXTERN_C_BEGIN
1000 #undef __FUNCT__
1001 #define __FUNCT__ "MatCreate_BlockMat"
1002 PetscErrorCode  MatCreate_BlockMat(Mat A)
1003 {
1004   Mat_BlockMat   *b;
1005   PetscErrorCode ierr;
1006 
1007   PetscFunctionBegin;
1008   ierr    = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
1009   A->data = (void*)b;
1010   ierr    = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1011 
1012   A->assembled    = PETSC_TRUE;
1013   A->preallocated = PETSC_FALSE;
1014   ierr            = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1015 
1016   ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
1017                                            "MatBlockMatSetPreallocation_BlockMat",
1018                                            MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1019   PetscFunctionReturn(0);
1020 }
1021 EXTERN_C_END
1022 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix whose blocks are stored as sparse Mat objects

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzero blocks per block row if known (use PETSC_NULL otherwise)


   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode  MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* standard create/set-sizes/set-type/preallocate sequence for a new Mat */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1069 
1070 
1071 
1072