xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 84df9cb40eca90ea9b18a456fab7a4ecc7f6c1a4)
1 
2 /*
3    This provides a matrix that consists of Mats
4 */
5 
6 #include <private/matimpl.h>              /*I "petscmat.h" I*/
7 #include <../src/mat/impls/baij/seq/baij.h>    /* use the common AIJ data-structure */
8 #include <petscksp.h>
9 
typedef struct {
  SEQAIJHEADER(Mat);        /* CSR storage (i, j, ilen, imax, nz, ...) where each stored entry a[] is itself a Mat */
  SEQBAIJHEADER;            /* block fields (bs, mbs, ...) shared with SeqBAIJ */
  Mat               *diags; /* cached LU/Cholesky factors of the diagonal blocks; built lazily by the SOR routines */

  Vec               left,right,middle,workb;   /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
17 
18 extern PetscErrorCode  MatBlockMatSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*);
19 
#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat_Symmetric"
/*
   Symmetric block SOR (omega = 1, i.e. block symmetric Gauss-Seidel) for a
   BlockMat that stores only the diagonal and upper-triangular blocks
   (the A->symmetric storage scheme).

   On first use the diagonal blocks are Cholesky factored and cached in
   a->diags, and a work copy of the right hand side (a->workb) is created:
   the forward sweep modifies the rhs copy in place to account for the
   unstored lower-triangular part (see MatSOR_SeqSBAIJ for the same trick).

   Restrictions (error otherwise): its*lits > 0, no SOR_EISENSTAT,
   omega == 1.0, fshift == 0.0, and a backward sweep is only allowed in
   combination with a forward sweep (the forward sweep prepares the rhs).
*/
PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right, middle = a->middle;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
  if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP))
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");

  /* first call: factor every diagonal block and create the rhs work vector */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      /* NOTE(review): a->diags[i] is passed to MatCholeskyFactorSymbolic()
         without being created first (e.g. via MatGetFactor()); confirm the
         factor matrix is expected to be created elsewhere */
      ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(&row);CHKERRQ(ierr);
      ierr = ISDestroy(&col);CHKERRQ(ierr);
    }
    ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
  }
  diag    = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  /* copy right hand side because it must be modified during iteration */
  ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
  /* NOTE(review): b is obtained with VecGetArrayRead() but the sweep below
     writes through it (via the middle alias) and VecPlaceArray() calls
     discard the const qualifier; confirm this is intentional */
  ierr = VecGetArrayRead(a->workb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        /* the "+ 1" skips the diagonal block, which is stored first in each
           row of the upper-triangular storage */
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        /* left = sum of off-diagonal blocks applied to current x */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        /* left = b_i - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x_i = D_i^{-1} left, written directly into the x segment */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);

        /* now adjust right hand side, see MatSOR_SeqSBAIJ */
        for (j=0; j<n; j++) {
          /* b_{idx[j]} -= U_{i,idx[j]}^T x_i accounts for the unstored lower part */
          ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
          ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
          ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
          ierr = VecResetArray(middle);CHKERRQ(ierr);
        }
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      /* backward sweep over the (forward-sweep-modified) rhs; no further
         rhs adjustment is needed here */
      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i] - 1;
        idx  = a->j + a->i[i] + 1;
        v    = a->a + a->i[i] + 1;

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
          ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
          ierr = VecResetArray(right);CHKERRQ(ierr);
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(a->workb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
126 
#undef __FUNCT__
#define __FUNCT__ "MatSOR_BlockMat"
/*
   Block SOR (omega = 1, i.e. block Gauss-Seidel) for a nonsymmetric
   BlockMat: full rows of blocks are stored, so the diagonal block is
   located by comparing column indices rather than by position.

   On first use the diagonal blocks are LU factored and cached in a->diags.

   Restrictions (error otherwise): its*lits > 0, no SOR_EISENSTAT,
   omega == 1.0, fshift == 0.0.
*/
PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat       *a = (Mat_BlockMat*)A->data;
  PetscScalar        *x;
  const Mat          *v = a->a;
  const PetscScalar  *b;
  PetscErrorCode     ierr;
  PetscInt           n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
  const PetscInt     *idx;
  IS                 row,col;
  MatFactorInfo      info;
  Vec                left = a->left,right = a->right;
  Mat                *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");

  /* first call: LU factor every diagonal block */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
      /* NOTE(review): a->diags[i] is passed to MatLUFactorSymbolic()
         without being created first (e.g. via MatGetFactor()); confirm the
         factor matrix is expected to be created elsewhere */
      ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
      ierr = ISDestroy(&row);CHKERRQ(ierr);
      ierr = ISDestroy(&col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){

      for (i=0; i<mbs; i++) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        /* left = sum over off-diagonal blocks of row i applied to current x */
        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {   /* skip the diagonal block */
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        /* left = b_i - left   (VecPlaceArray discards const on b) */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* x_i = D_i^{-1} left, written directly into the x segment */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){

      /* identical to the forward sweep but traversing block rows in reverse */
      for (i=mbs-1; i>=0; i--) {
        n    = a->i[i+1] - a->i[i];
        idx  = a->j + a->i[i];
        v    = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
223 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts/adds scalar values into a BlockMat.  Global (row,col) indices are
   split into a block index (row/bs, col/bs) and an offset within the block
   (row%bs, col%bs).  The block row is searched (binary search for long rows,
   then linear scan) for the target block column; if absent, a new bs x bs
   SeqAIJ block is created and spliced into the row.  The scalar is then
   forwarded to MatSetValues() on the block.

   For symmetric storage (A->symmetric) entries below the block diagonal
   are silently dropped.

   This mirrors MatSetValues_SeqAIJ; see that routine for the search and
   reallocation pattern.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj=a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscBool      roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;
    if (row < 0) continue;   /* negative row indices mark entries to ignore */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];    /* block-column indices of block row brow */
    ap   = aa + ai[brow];    /* block (Mat) pointers of block row brow */
    rmax = imax[brow];       /* allocated slots in this block row */
    nrow = ailen[brow];      /* currently used slots */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue;   /* keep only upper triangle of symmetric storage */
      ridx = row % bs; cidx = col % bs;            /* position inside the bs x bs block */
      if (roworiented) {
        value = v[l + k*n];
      } else {
        value = v[k + l*m];
      }
      /* reuse the previous search window while columns are increasing */
      if (col <= lastcol) low = 0; else high = nrow;
      lastcol = col;
      /* binary search to narrow the window, then linear scan */
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) {
          goto noinsert1;   /* block already present at slot i */
        }
      }
      /* NOTE(review): with nonew == 1 and the block absent, control reaches
         noinsert1 with i possibly == nrow and ap[i] unset; confirm callers
         never rely on that combination */
      if (nonew == 1) goto noinsert1;
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      /* grow the row storage if this row is full */
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;   /* empty slot: the block Mat is created below */
      rp[i]           = bcol;
      a->nz++;
      noinsert1:;
      /* create the bs x bs block lazily on first touch */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
303 
304 #undef __FUNCT__
305 #define __FUNCT__ "MatLoad_BlockMat"
306 PetscErrorCode MatLoad_BlockMat(Mat newmat, PetscViewer viewer)
307 {
308   PetscErrorCode    ierr;
309   Mat               tmpA;
310   PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
311   const PetscInt    *cols;
312   const PetscScalar *values;
313   PetscBool         flg = PETSC_FALSE,notdone;
314   Mat_SeqAIJ        *a;
315   Mat_BlockMat      *amat;
316 
317   PetscFunctionBegin;
318   ierr = MatCreate(PETSC_COMM_SELF,&tmpA);CHKERRQ(ierr);
319   ierr = MatSetType(tmpA,MATSEQAIJ);CHKERRQ(ierr);
320   ierr = MatLoad_SeqAIJ(tmpA,viewer);CHKERRQ(ierr);
321 
322   ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
323   ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
324   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
325   ierr = PetscOptionsBool("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
326   ierr = PetscOptionsEnd();CHKERRQ(ierr);
327 
328   /* Determine number of nonzero blocks for each block row */
329   a    = (Mat_SeqAIJ*) tmpA->data;
330   mbs  = m/bs;
331   ierr = PetscMalloc3(mbs,PetscInt,&lens,bs,PetscInt*,&ii,bs,PetscInt,&ilens);CHKERRQ(ierr);
332   ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);
333 
334   for (i=0; i<mbs; i++) {
335     for (j=0; j<bs; j++) {
336       ii[j]         = a->j + a->i[i*bs + j];
337       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
338     }
339 
340     currentcol = -1;
341     notdone = PETSC_TRUE;
342     while (PETSC_TRUE) {
343       notdone = PETSC_FALSE;
344       nextcol = 1000000000;
345       for (j=0; j<bs; j++) {
346         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
347           ii[j]++;
348           ilens[j]--;
349         }
350         if (ilens[j] > 0) {
351           notdone = PETSC_TRUE;
352           nextcol = PetscMin(nextcol,ii[j][0]/bs);
353         }
354       }
355       if (!notdone) break;
356       if (!flg || (nextcol >= i)) lens[i]++;
357       currentcol = nextcol;
358     }
359   }
360 
361   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) {
362     ierr = MatSetSizes(newmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
363   }
364   ierr = MatBlockMatSetPreallocation(newmat,bs,0,lens);CHKERRQ(ierr);
365   if (flg) {
366     ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
367   }
368   amat = (Mat_BlockMat*)(newmat)->data;
369 
370   /* preallocate the submatrices */
371   ierr = PetscMalloc(bs*sizeof(PetscInt),&llens);CHKERRQ(ierr);
372   for (i=0; i<mbs; i++) { /* loops for block rows */
373     for (j=0; j<bs; j++) {
374       ii[j]         = a->j + a->i[i*bs + j];
375       ilens[j]      = a->i[i*bs + j + 1] - a->i[i*bs + j];
376     }
377 
378     currentcol = 1000000000;
379     for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
380       if (ilens[j] > 0) {
381 	currentcol = PetscMin(currentcol,ii[j][0]/bs);
382       }
383     }
384 
385     notdone = PETSC_TRUE;
386     while (PETSC_TRUE) {  /* loops over blocks in block row */
387 
388       notdone = PETSC_FALSE;
389       nextcol = 1000000000;
390       ierr = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
391       for (j=0; j<bs; j++) { /* loop over rows in block */
392         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
393           ii[j]++;
394           ilens[j]--;
395           llens[j]++;
396         }
397         if (ilens[j] > 0) {
398           notdone = PETSC_TRUE;
399           nextcol = PetscMin(nextcol,ii[j][0]/bs);
400         }
401       }
402       if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
403       if (!flg || currentcol >= i) {
404         amat->j[cnt] = currentcol;
405         ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
406       }
407 
408       if (!notdone) break;
409       currentcol = nextcol;
410     }
411     amat->ilen[i] = lens[i];
412   }
413   CHKMEMQ;
414 
415   ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
416   ierr = PetscFree(llens);CHKERRQ(ierr);
417 
418   /* copy over the matrix, one row at a time */
419   for (i=0; i<m; i++) {
420     ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
421     ierr = MatSetValues(newmat,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
422     ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
423   }
424   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
425   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
426   PetscFunctionReturn(0);
427 }
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "MatView_BlockMat"
431 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
432 {
433   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
434   PetscErrorCode    ierr;
435   const char        *name;
436   PetscViewerFormat format;
437 
438   PetscFunctionBegin;
439   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
440   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
441   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
442     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
443     if (A->symmetric) {
444       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
445     }
446   }
447   PetscFunctionReturn(0);
448 }
449 
450 #undef __FUNCT__
451 #define __FUNCT__ "MatDestroy_BlockMat"
452 PetscErrorCode MatDestroy_BlockMat(Mat mat)
453 {
454   PetscErrorCode ierr;
455   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
456   PetscInt       i;
457 
458   PetscFunctionBegin;
459   ierr = VecDestroy(&bmat->right);CHKERRQ(ierr);
460   ierr = VecDestroy(&bmat->left);CHKERRQ(ierr);
461   ierr = VecDestroy(&bmat->middle);CHKERRQ(ierr);
462   ierr = VecDestroy(&bmat->workb);CHKERRQ(ierr);
463   if (bmat->diags) {
464     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
465       ierr = MatDestroy(&bmat->diags[i]);CHKERRQ(ierr);
466     }
467   }
468   if (bmat->a) {
469     for (i=0; i<bmat->nz; i++) {
470       ierr = MatDestroy(&bmat->a[i]);CHKERRQ(ierr);
471     }
472   }
473   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
474   ierr = PetscFree(mat->data);CHKERRQ(ierr);
475   PetscFunctionReturn(0);
476 }
477 
478 #undef __FUNCT__
479 #define __FUNCT__ "MatMult_BlockMat"
480 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
481 {
482   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
483   PetscErrorCode ierr;
484   PetscScalar    *xx,*yy;
485   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
486   Mat            *aa;
487 
488   PetscFunctionBegin;
489   CHKMEMQ;
490   /*
491      Standard CSR multiply except each entry is a Mat
492   */
493   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
494 
495   ierr = VecSet(y,0.0);CHKERRQ(ierr);
496   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
497   aj  = bmat->j;
498   aa  = bmat->a;
499   ii  = bmat->i;
500   for (i=0; i<m; i++) {
501     jrow = ii[i];
502     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
503     n    = ii[i+1] - jrow;
504     for (j=0; j<n; j++) {
505       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
506       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
507       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
508       jrow++;
509     }
510     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
511   }
512   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
513   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
514   CHKMEMQ;
515   PetscFunctionReturn(0);
516 }
517 
#undef __FUNCT__
#define __FUNCT__ "MatMult_BlockMat_Symmetric"
/*
   y = A x for a symmetric BlockMat that stores only the diagonal and
   upper-triangular blocks.  For every stored off-diagonal block B = A_{i,c}
   two contributions are made: y_i += B x_c (upper part) and
   y_c += B^T x_i (the unstored lower part, via MatMultTransposeAdd).
*/
PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscScalar    *xx,*yy;
  PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
  Mat            *aa;

  PetscFunctionBegin;
  CHKMEMQ;
  /*
     Standard CSR multiply except each entry is a Mat
  */
  ierr = VecGetArray(x,&xx);CHKERRQ(ierr);

  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  aj  = bmat->j;
  aa  = bmat->a;
  ii  = bmat->i;
  for (i=0; i<m; i++) {
    jrow = ii[i];
    n    = ii[i+1] - jrow;
    /* left aliases y_i (output), middle aliases x_i (input for transposes) */
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);
    /* if we ALWAYS required a diagonal entry then could remove this if test */
    if (aj[jrow] == i) {
      /* diagonal block contributes only once: y_i += D_i x_i */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
      n--;
    }
    for (j=0; j<n; j++) {
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);

      ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
      ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
    ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
571 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/*
   NOTE(review): unimplemented stub -- returns success without computing
   z = y + A*x, so callers silently receive no result.  Confirm whether
   this should instead SETERRQ with PETSC_ERR_SUP.
*/
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
579 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/*
   NOTE(review): unimplemented stub -- returns success without computing
   y = A^T x, so callers silently receive no result.  Confirm whether
   this should instead SETERRQ with PETSC_ERR_SUP.
*/
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
587 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/*
   NOTE(review): unimplemented stub -- returns success without computing
   z = y + A^T x, so callers silently receive no result.  Confirm whether
   this should instead SETERRQ with PETSC_ERR_SUP.
*/
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
595 
596 /*
597      Adds diagonal pointers to sparse matrix structure.
598 */
599 #undef __FUNCT__
600 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
601 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
602 {
603   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
604   PetscErrorCode ierr;
605   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
606 
607   PetscFunctionBegin;
608   if (!a->diag) {
609     ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
610   }
611   for (i=0; i<mbs; i++) {
612     a->diag[i] = a->i[i+1];
613     for (j=a->i[i]; j<a->i[i+1]; j++) {
614       if (a->j[j] == i) {
615         a->diag[i] = j;
616         break;
617       }
618     }
619   }
620   PetscFunctionReturn(0);
621 }
622 
623 #undef __FUNCT__
624 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
625 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
626 {
627   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
628   Mat_SeqAIJ     *c;
629   PetscErrorCode ierr;
630   PetscInt       i,k,first,step,lensi,nrows,ncols;
631   PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
632   PetscScalar    *a_new;
633   Mat            C,*aa = a->a;
634   PetscBool      stride,equal;
635 
636   PetscFunctionBegin;
637   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
638   if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for idential column and row indices");
639   ierr = PetscTypeCompare((PetscObject)iscol,ISSTRIDE,&stride);CHKERRQ(ierr);
640   if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
641   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
642   if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");
643 
644   ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
645   ncols = nrows;
646 
647   /* create submatrix */
648   if (scall == MAT_REUSE_MATRIX) {
649     PetscInt n_cols,n_rows;
650     C = *B;
651     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
652     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
653     ierr = MatZeroEntries(C);CHKERRQ(ierr);
654   } else {
655     ierr = MatCreate(((PetscObject)A)->comm,&C);CHKERRQ(ierr);
656     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
657     if (A->symmetric) {
658       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
659     } else {
660       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
661     }
662     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
663     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
664   }
665   c = (Mat_SeqAIJ*)C->data;
666 
667   /* loop over rows inserting into submatrix */
668   a_new    = c->a;
669   j_new    = c->j;
670   i_new    = c->i;
671 
672   for (i=0; i<nrows; i++) {
673     lensi = ailen[i];
674     for (k=0; k<lensi; k++) {
675       *j_new++ = *aj++;
676       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
677     }
678     i_new[i+1]  = i_new[i] + lensi;
679     c->ilen[i]  = lensi;
680   }
681 
682   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
683   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
684   *B = C;
685   PetscFunctionReturn(0);
686 }
687 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly for a BlockMat: compresses out the unused preallocated
   slots in each block row (exactly as MatAssemblyEnd_SeqAIJ does for
   scalar entries), assembles every stored block Mat, and records the
   diagonal block locations.
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax   = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i] ;
      ap = aa + ai[i] ;
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    /* row starts become cumulative sums of the used lengths */
    ai[i] = ai[i-1] + ailen[i-1];
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]  = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* assemble every block; each must have been created by MatSetValues/MatLoad */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  A->info.mallocs     += a->reallocs;
  a->reallocs          = 0;
  A->info.nz_unneeded  = (double)fshift;
  a->rmax              = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
746 
747 #undef __FUNCT__
748 #define __FUNCT__ "MatSetOption_BlockMat"
749 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscBool  flg)
750 {
751   PetscFunctionBegin;
752   if (opt == MAT_SYMMETRIC && flg) {
753     A->ops->sor = MatSOR_BlockMat_Symmetric;
754     A->ops->mult  = MatMult_BlockMat_Symmetric;
755   } else {
756     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
757   }
758   PetscFunctionReturn(0);
759 }
760 
761 
/* Function table for MATBLOCKMAT; slot numbers follow the _MatOps layout.
   Unimplemented operations are 0.  Note MatMultAdd/MatMultTranspose/
   MatMultTransposeAdd point at empty stubs (see NOTE(review) on them). */
static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
       0,
       0,
       MatMult_BlockMat,
/* 4*/ MatMultAdd_BlockMat,
       MatMultTranspose_BlockMat,
       MatMultTransposeAdd_BlockMat,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatSOR_BlockMat,
       0,
/*15*/ 0,
       0,
       0,
       0,
       0,
/*20*/ 0,
       MatAssemblyEnd_BlockMat,
       MatSetOption_BlockMat,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ MatGetSubMatrix_BlockMat,
       MatDestroy_BlockMat,
       MatView_BlockMat,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_BlockMat,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0,
       0,
/*99*/ 0,
       0,
       0,
       0,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/0,
       0,
       0,
       0,
       0,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       0,
       0,
       0
};
886 
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzeros per block row (same for all rows)
-  nnz - array containing the number of nonzeros in the various block rows
         (possibly different for each row) or PETSC_NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=PETSC_NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode  MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatch to MatBlockMatSetPreallocation_BlockMat when B is a BlockMat;
     silently does nothing for other matrix types */
  ierr = PetscTryMethod(B,"MatBlockMatSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[]),(B,bs,nz,nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
925 
926 EXTERN_C_BEGIN
927 #undef __FUNCT__
928 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
929 PetscErrorCode  MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
930 {
931   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
932   PetscErrorCode ierr;
933   PetscInt       i;
934 
935   PetscFunctionBegin;
936   ierr = PetscLayoutSetBlockSize(A->rmap,1);CHKERRQ(ierr);
937   ierr = PetscLayoutSetBlockSize(A->cmap,1);CHKERRQ(ierr);
938   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
939   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
940 
941   if (bs < 1) SETERRQ1(((PetscObject)A)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Block size given %D must be great than zero",bs);
942   if (A->rmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap->n);
943   if (A->cmap->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap->n);
944   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
945   if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
946   if (nnz) {
947     for (i=0; i<A->rmap->n/bs; i++) {
948       if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
949       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
950     }
951   }
952   A->rmap->bs = A->cmap->bs = bs;
953   bmat->mbs  = A->rmap->n/bs;
954 
955   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
956   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->middle);CHKERRQ(ierr);
957   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
958 
959   if (!bmat->imax) {
960     ierr = PetscMalloc2(A->rmap->n,PetscInt,&bmat->imax,A->rmap->n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
961     ierr = PetscLogObjectMemory(A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
962   }
963   if (nnz) {
964     nz = 0;
965     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
966       bmat->imax[i] = nnz[i];
967       nz           += nnz[i];
968     }
969   } else {
970     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
971   }
972 
973   /* bmat->ilen will count nonzeros in each row so far. */
974   for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}
975 
976   /* allocate the matrix space */
977   ierr = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
978   ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap->n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
979   ierr = PetscLogObjectMemory(A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
980   bmat->i[0] = 0;
981   for (i=1; i<bmat->mbs+1; i++) {
982     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
983   }
984   bmat->singlemalloc = PETSC_TRUE;
985   bmat->free_a       = PETSC_TRUE;
986   bmat->free_ij      = PETSC_TRUE;
987 
988   bmat->nz                = 0;
989   bmat->maxnz             = nz;
990   A->info.nz_unneeded  = (double)bmat->maxnz;
991 
992   PetscFunctionReturn(0);
993 }
994 EXTERN_C_END
995 
996 /*MC
   MATBLOCKMAT - A matrix type defined by a set of Mat objects that together represent a sparse
                 block matrix whose individual blocks are (usually) themselves sparse.
999 
1000   Level: advanced
1001 
1002 .seealso: MatCreateBlockMat()
1003 
1004 M*/
1005 
1006 EXTERN_C_BEGIN
1007 #undef __FUNCT__
1008 #define __FUNCT__ "MatCreate_BlockMat"
1009 PetscErrorCode  MatCreate_BlockMat(Mat A)
1010 {
1011   Mat_BlockMat   *b;
1012   PetscErrorCode ierr;
1013 
1014   PetscFunctionBegin;
1015   ierr = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
1016   A->data = (void*)b;
1017   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1018 
1019   A->assembled     = PETSC_TRUE;
1020   A->preallocated  = PETSC_FALSE;
1021   ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1022 
1023   ierr = PetscObjectComposeFunctionDynamic((PetscObject)A,"MatBlockMatSetPreallocation_C",
1024                                      "MatBlockMatSetPreallocation_BlockMat",
1025                                       MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1026 
1027   PetscFunctionReturn(0);
1028 }
1029 EXTERN_C_END
1030 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new MATBLOCKMAT matrix, whose nonzero blocks
   are each stored as a (usually sparse) Mat

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzeros per block row if known (use PETSC_NULL otherwise)


   Output Parameter:
.  A - the matrix

   Level: intermediate

   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode  MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* standard create/set-sizes/set-type/preallocate sequence; MatSetType()
     triggers MatCreate_BlockMat() which composes the preallocation method */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1077 
1078 
1079 
1080