xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 28adb3f739167aae2a3fdf9bf501dbd0150de1de)
1 #define PETSCMAT_DLL
2 
3 /*
4    This provides a matrix that consists of Mats
5 */
6 
7 #include "private/matimpl.h"              /*I "petscmat.h" I*/
8 #include "../src/mat/impls/baij/seq/baij.h"    /* use the common AIJ data-structure */
9 #include "petscksp.h"
10 
11 #define CHUNKSIZE   15
12 
typedef struct {
  SEQAIJHEADER(Mat);       /* CSR-style storage (a,i,j,imax,ilen,...) where each stored entry is a Mat, not a scalar */
  SEQBAIJHEADER;           /* adds block information such as mbs (number of block rows) */
  Mat               *diags;/* lazily built factorizations of the diagonal blocks, used by the relaxation routines */

  Vec               left,right,middle,workb;   /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
20 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_BlockMat_Symmetric"
/*
    SSOR relaxation for the symmetric BlockMat format, where only the diagonal and
    strictly upper triangular blocks of each block row are stored (diagonal first).
    Supports only omega == 1.0, fshift == 0, and forward or forward+backward sweeps.
    On first use the diagonal blocks are Cholesky factored (nested-dissection ordering)
    and a work copy of the right hand side is created.
*/
PetscErrorCode MatRelax_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right, middle = a->middle;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_ERR_SUP,"No support yet for fshift");
  /* the backward sweep relies on the right hand side adjusted during the forward sweep,
     so a backward sweep alone is not supported */
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
    SETERRQ(PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");

  if (!a->diags) {
    /* first call: Cholesky-factor every diagonal block.
       NOTE(review): a->diags[i] is passed to MatCholeskyFactorSymbolic() without a visible
       MatCreate()/MatGetFactor() first — confirm this factorization API allocates the factor
       matrix itself; otherwise this reads uninitialized memory. */
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag    = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  ierr = VecGetArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatRelax_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        /* skip the diagonal block, which is stored first in the row */
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        /* left = sum over strictly upper blocks of U_ij * x_j */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        /* left = b_i - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x_i = D_i^{-1} (b_i - U x) */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);

        /* now adjust right hand side, see MatRelax_SeqSBAIJ */
        for (j=0; j<n; j++) {
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        /* left = sum over strictly upper blocks of U_ij * x_j, using the b adjusted above */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(a->workb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
127 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_BlockMat"
/*
    SOR relaxation for the (nonsymmetric) BlockMat format, where all blocks of each
    block row are stored.  Supports only omega == 1.0 and fshift == 0.  On first use
    the diagonal blocks are LU factored with nested-dissection ordering.
*/
PetscErrorCode MatRelax_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_ERR_SUP,"No support yet for fshift");

  if (!a->diags) {
    /* first call: LU-factor every diagonal block.
       NOTE(review): a->diags[i] is passed to MatLUFactorSymbolic() without a visible
       MatCreate()/MatGetFactor() first — confirm this factorization API allocates the factor
       matrix itself; otherwise this reads uninitialized memory. */
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatRelax_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        /* left = sum of off-diagonal blocks times the current x */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {    /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        /* left = b_i - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x_i = D_i^{-1} (b_i - sum_j A_ij x_j) */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {    /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
224 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts scalar values into the BlockMat: a global entry (row,col) is routed to
   element (row%bs, col%bs) of the bs-by-bs SeqAIJ matrix stored at block position
   (row/bs, col/bs).  Block entries are created lazily on first touch, and the
   block-row column list is kept sorted via binary search plus shift-insert, following
   the standard SeqAIJ MatSetValues pattern.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscTruth     roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;    /* negative row indices are silently ignored (PETSc convention) */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];     /* block-column indices of block row brow */
    ap   = aa + ai[brow];     /* Mat entries of block row brow */
    rmax = imax[brow];        /* allocated slots in this block row */
    nrow = ailen[brow];       /* slots currently in use */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue;   /* symmetric storage keeps only the upper triangle */
      ridx = row % bs; cidx = col % bs;            /* position within the bs-by-bs block */
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      /* narrow the binary-search window when columns arrive in sorted order */
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;    /* block already present */
        }
      }
      if (nonew == 1) goto noinsert1;      /* new nonzeros disallowed: drop the value */
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* grow the row storage if all preallocated slots are used */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;    /* slot i is now vacant; the block Mat is created below */
      rp[i]           = bcol;
      a->nz++;
      noinsert1:;
      if (!*(ap+i)) {
        /* lazily create the bs-by-bs block on first insertion */
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
303 
304 #undef __FUNCT__
305 #define __FUNCT__ "MatLoad_BlockMat"
306 PetscErrorCode MatLoad_BlockMat(PetscViewer viewer, const MatType type,Mat *A)
307 {
308   PetscErrorCode    ierr;
309   Mat               tmpA;
310   PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
311   const PetscInt    *cols;
312   const PetscScalar *values;
313   PetscTruth        flg = PETSC_FALSE,notdone;
314   Mat_SeqAIJ        *a;
315   Mat_BlockMat      *amat;
316 
317   PetscFunctionBegin;
318   ierr = MatLoad_SeqAIJ(viewer,MATSEQAIJ,&tmpA);CHKERRQ(ierr);
319 
320   ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
321   ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
322     ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
323     ierr = PetscOptionsTruth("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
324   ierr = PetscOptionsEnd();CHKERRQ(ierr);
325 
326   /* Determine number of nonzero blocks for each block row */
327   a    = (Mat_SeqAIJ*) tmpA->data;
328   mbs  = m/bs;
329   ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
330   ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);
331 
332   for (i=0; i<mbs; i++) {
333     for (j=0; j<bs; j++) {
334       ii[j]         = a->j + a->i[i*bs + j];
335       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
336     }
337 
338     currentcol = -1;
339     notdone = PETSC_TRUE;
340     while (PETSC_TRUE) {
341       notdone = PETSC_FALSE;
342       nextcol = 1000000000;
343       for (j=0; j<bs; j++) {
344         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
345           ii[j]++;
346           ilens[j]--;
347         }
348         if (ilens[j] > 0) {
349           notdone = PETSC_TRUE;
350           nextcol = PetscMin(nextcol,ii[j][0]/bs);
351         }
352       }
353       if (!notdone) break;
354       if (!flg || (nextcol >= i)) lens[i]++;
355       currentcol = nextcol;
356     }
357   }
358 
359   ierr = MatCreateBlockMat(PETSC_COMM_SELF,m,n,bs,0,lens,A);CHKERRQ(ierr);
360   if (flg) {
361     ierr = MatSetOption(*A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
362   }
363   amat = (Mat_BlockMat*)(*A)->data;
364 
365   /* preallocate the submatrices */
366   ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
367   for (i=0; i<mbs; i++) { /* loops for block rows */
368     for (j=0; j<bs; j++) {
369       ii[j]         = a->j + a->i[i*bs + j];
370       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
371     }
372 
373     currentcol = 1000000000;
374     for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
375       if (ilens[j] > 0) {
376 	currentcol = PetscMin(currentcol,ii[j][0]/bs);
377       }
378     }
379 
380     notdone = PETSC_TRUE;
381     while (PETSC_TRUE) {  /* loops over blocks in block row */
382 
383       notdone = PETSC_FALSE;
384       nextcol = 1000000000;
385       ierr = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
386       for (j=0; j<bs; j++) { /* loop over rows in block */
387         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
388           ii[j]++;
389           ilens[j]--;
390           llens[j]++;
391         }
392         if (ilens[j] > 0) {
393           notdone = PETSC_TRUE;
394           nextcol = PetscMin(nextcol,ii[j][0]/bs);
395         }
396       }
397       if (cnt >= amat->maxnz) SETERRQ1(PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
398       if (!flg || currentcol >= i) {
399         amat->j[cnt] = currentcol;
400         ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
401       }
402 
403       if (!notdone) break;
404       currentcol = nextcol;
405     }
406     amat->ilen[i] = lens[i];
407   }
408   CHKMEMQ;
409 
410   ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
411   ierr = PetscFree(llens);CHKERRQ(ierr);
412 
413   /* copy over the matrix, one row at a time */
414   for (i=0; i<m; i++) {
415     ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
416     ierr = MatSetValues(*A,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
417     ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
418   }
419   ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
420   ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
421   PetscFunctionReturn(0);
422 }
423 
424 #undef __FUNCT__
425 #define __FUNCT__ "MatView_BlockMat"
426 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
427 {
428   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
429   PetscErrorCode    ierr;
430   const char        *name;
431   PetscViewerFormat format;
432 
433   PetscFunctionBegin;
434   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
435   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
436   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
437     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
438     if (A->symmetric) {
439       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
440     }
441   }
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "MatDestroy_BlockMat"
447 PetscErrorCode MatDestroy_BlockMat(Mat mat)
448 {
449   PetscErrorCode ierr;
450   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
451   PetscInt       i;
452 
453   PetscFunctionBegin;
454   if (bmat->right) {
455     ierr = VecDestroy(bmat->right);CHKERRQ(ierr);
456   }
457   if (bmat->left) {
458     ierr = VecDestroy(bmat->left);CHKERRQ(ierr);
459   }
460   if (bmat->middle) {
461     ierr = VecDestroy(bmat->middle);CHKERRQ(ierr);
462   }
463   if (bmat->workb) {
464     ierr = VecDestroy(bmat->workb);CHKERRQ(ierr);
465   }
466   if (bmat->diags) {
467     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
468       if (bmat->diags[i]) {ierr = MatDestroy(bmat->diags[i]);CHKERRQ(ierr);}
469     }
470   }
471   if (bmat->a) {
472     for (i=0; i<bmat->nz; i++) {
473       if (bmat->a[i]) {ierr = MatDestroy(bmat->a[i]);CHKERRQ(ierr);}
474     }
475   }
476   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
477   ierr = PetscFree(bmat);CHKERRQ(ierr);
478   PetscFunctionReturn(0);
479 }
480 
481 #undef __FUNCT__
482 #define __FUNCT__ "MatMult_BlockMat"
483 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
484 {
485   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
486   PetscErrorCode ierr;
487   PetscScalar    *xx,*yy;
488   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
489   Mat            *aa;
490 
491   PetscFunctionBegin;
492   CHKMEMQ;
493   /*
494      Standard CSR multiply except each entry is a Mat
495   */
496   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
497 
498   ierr = VecSet(y,0.0);CHKERRQ(ierr);
499   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
500   aj  = bmat->j;
501   aa  = bmat->a;
502   ii  = bmat->i;
503   for (i=0; i<m; i++) {
504     jrow = ii[i];
505     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
506     n    = ii[i+1] - jrow;
507     for (j=0; j<n; j++) {
508       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
509       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
510       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
511       jrow++;
512     }
513     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
514   }
515   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
516   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
517   CHKMEMQ;
518   PetscFunctionReturn(0);
519 }
520 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A*x when only the upper triangular blocks of the symmetric A are stored:
   each stored strictly-upper block A_ij contributes A_ij*x_j to y_i (upper part)
   and A_ij^T*x_i to y_j (implied lower part); the diagonal block contributes once.
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj  = bmat->j;
  aa  = bmat->a;
  ii  = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);    /* left aliases y_i */
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);  /* middle aliases x_i */
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      /* diagonal block: applied once, no transpose contribution */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
574 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/* NOTE(review): unimplemented stub registered in the ops table — it returns success
   without computing z = y + A*x, so callers silently get whatever was in z.
   Confirm whether this should instead SETERRQ with PETSC_ERR_SUP. */
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
582 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/* NOTE(review): unimplemented stub registered in the ops table — it returns success
   without computing y = A^T*x.  Confirm whether this should SETERRQ instead. */
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
590 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/* NOTE(review): unimplemented stub registered in the ops table — it returns success
   without computing z = y + A^T*x.  Confirm whether this should SETERRQ instead. */
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
598 
599 /*
600      Adds diagonal pointers to sparse matrix structure.
601 */
602 #undef __FUNCT__
603 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
604 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
605 {
606   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
607   PetscErrorCode ierr;
608   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
609 
610   PetscFunctionBegin;
611   if (!a->diag) {
612     ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
613   }
614   for (i=0; i<mbs; i++) {
615     a->diag[i] = a->i[i+1];
616     for (j=a->i[i]; j<a->i[i+1]; j++) {
617       if (a->j[j] == i) {
618         a->diag[i] = j;
619         break;
620       }
621     }
622   }
623   PetscFunctionReturn(0);
624 }
625 
626 #undef __FUNCT__
627 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
628 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
629 {
630   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
631   Mat_SeqAIJ     *c;
632   PetscErrorCode ierr;
633   PetscInt       i,k,first,step,lensi,nrows,ncols;
634   PetscInt       *j_new,*i_new,*aj = a->j,*ai = a->i,ii,*ailen = a->ilen;
635   PetscScalar    *a_new;
636   Mat            C,*aa = a->a;
637   PetscTruth     stride,equal;
638 
639   PetscFunctionBegin;
640   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
641   if (!equal) SETERRQ(PETSC_ERR_SUP,"Only for idential column and row indices");
642   ierr = ISStride(iscol,&stride);CHKERRQ(ierr);
643   if (!stride) SETERRQ(PETSC_ERR_SUP,"Only for stride indices");
644   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
645   if (step != A->rmap->bs) SETERRQ(PETSC_ERR_SUP,"Can only select one entry from each block");
646 
647   ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
648   ncols = nrows;
649 
650   /* create submatrix */
651   if (scall == MAT_REUSE_MATRIX) {
652     PetscInt n_cols,n_rows;
653     C = *B;
654     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
655     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
656     ierr = MatZeroEntries(C);CHKERRQ(ierr);
657   } else {
658     ierr = MatCreate(((PetscObject)A)->comm,&C);CHKERRQ(ierr);
659     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
660     if (A->symmetric) {
661       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
662     } else {
663       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
664     }
665     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
666     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
667   }
668   c = (Mat_SeqAIJ*)C->data;
669 
670   /* loop over rows inserting into submatrix */
671   a_new    = c->a;
672   j_new    = c->j;
673   i_new    = c->i;
674 
675   for (i=0; i<nrows; i++) {
676     ii    = ai[i];
677     lensi = ailen[i];
678     for (k=0; k<lensi; k++) {
679       *j_new++ = *aj++;
680       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
681     }
682     i_new[i+1]  = i_new[i] + lensi;
683     c->ilen[i]  = lensi;
684   }
685 
686   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
687   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
688   *B = C;
689   PetscFunctionReturn(0);
690 }
691 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly: compresses unused preallocated slots out of every block row so the
   (i,j,a) arrays form a contiguous CSR structure, assembles each stored block matrix,
   and records the diagonal block positions.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax   = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i] ;
      ap = aa + ai[i] ;
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]  = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* after compression the blocks are contiguous; assemble each one */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  a->reallocs          = 0;
  A->info.nz_unneeded  = (double)fshift;
  a->rmax              = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
749 
750 #undef __FUNCT__
751 #define __FUNCT__ "MatSetOption_BlockMat"
752 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscTruth flg)
753 {
754   PetscFunctionBegin;
755   if (opt == MAT_SYMMETRIC && flg) {
756     A->ops->relax = MatRelax_BlockMat_Symmetric;
757     A->ops->mult  = MatMult_BlockMat_Symmetric;
758   } else {
759     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
760   }
761   PetscFunctionReturn(0);
762 }
763 
764 
765 static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
766        0,
767        0,
768        MatMult_BlockMat,
769 /* 4*/ MatMultAdd_BlockMat,
770        MatMultTranspose_BlockMat,
771        MatMultTransposeAdd_BlockMat,
772        0,
773        0,
774        0,
775 /*10*/ 0,
776        0,
777        0,
778        MatRelax_BlockMat,
779        0,
780 /*15*/ 0,
781        0,
782        0,
783        0,
784        0,
785 /*20*/ 0,
786        MatAssemblyEnd_BlockMat,
787        MatSetOption_BlockMat,
788        0,
789 /*24*/ 0,
790        0,
791        0,
792        0,
793        0,
794 /*29*/ 0,
795        0,
796        0,
797        0,
798        0,
799 /*34*/ 0,
800        0,
801        0,
802        0,
803        0,
804 /*39*/ 0,
805        0,
806        0,
807        0,
808        0,
809 /*44*/ 0,
810        0,
811        0,
812        0,
813        0,
814 /*49*/ 0,
815        0,
816        0,
817        0,
818        0,
819 /*54*/ 0,
820        0,
821        0,
822        0,
823        0,
824 /*59*/ MatGetSubMatrix_BlockMat,
825        MatDestroy_BlockMat,
826        MatView_BlockMat,
827        0,
828        0,
829 /*64*/ 0,
830        0,
831        0,
832        0,
833        0,
834 /*69*/ 0,
835        0,
836        0,
837        0,
838        0,
839 /*74*/ 0,
840        0,
841        0,
842        0,
843        0,
844 /*79*/ 0,
845        0,
846        0,
847        0,
848        MatLoad_BlockMat,
849 /*84*/ 0,
850        0,
851        0,
852        0,
853        0,
854 /*89*/ 0,
855        0,
856        0,
857        0,
858        0,
859 /*94*/ 0,
860        0,
861        0,
862        0};
863 
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzeros per block row (same for all rows)
-  nnz - array containing the number of nonzeros in the various block rows
         (possibly different for each row) or PETSC_NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[]);

  PetscFunctionBegin;
  /* dispatch to the type-specific implementation; silently a no-op when B is not a BlockMat */
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatBlockMatSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,bs,nz,nnz);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
905 
906 EXTERN_C_BEGIN
907 #undef __FUNCT__
908 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
909 PetscErrorCode PETSCMAT_DLLEXPORT MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
910 {
911   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
912   PetscErrorCode ierr;
913   PetscInt       i;
914 
915   PetscFunctionBegin;
916   if (bs < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Block size given %D must be great than zero",bs);
917   if (A->rmap->n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap->n);
918   if (A->cmap->n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap->n);
919   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
920   if (nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
921   if (nnz) {
922     for (i=0; i<A->rmap->n/bs; i++) {
923       if (nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
924       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
925     }
926   }
927   A->rmap->bs = A->cmap->bs = bs;
928   bmat->mbs  = A->rmap->n/bs;
929 
930   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
931   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
932   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
933 
934   if (!bmat->imax) {
935     ierr = PetscMalloc2(A->rmap->n,PetscInt,&bmat->imax,A->rmap->n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
936     ierr = PetscLogObjectMemory(A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
937   }
938   if (nnz) {
939     nz = 0;
940     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
941       bmat->imax[i] = nnz[i];
942       nz           += nnz[i];
943     }
944   } else {
945     SETERRQ(PETSC_ERR_SUP,"Currently requires block row by row preallocation");
946   }
947 
948   /* bmat->ilen will count nonzeros in each row so far. */
949   for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}
950 
951   /* allocate the matrix space */
952   ierr = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
953   ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap->n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
954   ierr = PetscLogObjectMemory(A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
955   bmat->i[0] = 0;
956   for (i=1; i<bmat->mbs+1; i++) {
957     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
958   }
959   bmat->singlemalloc = PETSC_TRUE;
960   bmat->free_a       = PETSC_TRUE;
961   bmat->free_ij      = PETSC_TRUE;
962 
963   bmat->nz                = 0;
964   bmat->maxnz             = nz;
965   A->info.nz_unneeded  = (double)bmat->maxnz;
966 
967   PetscFunctionReturn(0);
968 }
969 EXTERN_C_END
970 
971 /*MC
   MATBLOCKMAT - A matrix that is defined by a set of Mat objects that together represent a sparse block matrix
                 consisting of (usually) sparse blocks.
974 
975   Level: advanced
976 
977 .seealso: MatCreateBlockMat()
978 
979 M*/
980 
981 EXTERN_C_BEGIN
982 #undef __FUNCT__
983 #define __FUNCT__ "MatCreate_BlockMat"
984 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_BlockMat(Mat A)
985 {
986   Mat_BlockMat   *b;
987   PetscErrorCode ierr;
988 
989   PetscFunctionBegin;
990   ierr = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
991   A->data = (void*)b;
992   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
993 
994   ierr = PetscMapSetBlockSize(A->rmap,1);CHKERRQ(ierr);
995   ierr = PetscMapSetBlockSize(A->cmap,1);CHKERRQ(ierr);
996   ierr = PetscMapSetUp(A->rmap);CHKERRQ(ierr);
997   ierr = PetscMapSetUp(A->cmap);CHKERRQ(ierr);
998 
999   A->assembled     = PETSC_TRUE;
1000   A->preallocated  = PETSC_FALSE;
1001   ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1002 
1003   ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
1004                                      "MatBlockMatSetPreallocation_BlockMat",
1005                                       MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1006 
1007   PetscFunctionReturn(0);
1008 }
1009 EXTERN_C_END
1010 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix in which each block is stored as a separate (usually sparse) Mat

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use PETSC_NULL otherwise)

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. This information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1057 
1058 
1059 
1060