xref: /petsc/src/mat/impls/blockmat/seq/blockmat.c (revision 785e854f82a3c614b452fca2cf5ad4f2afe8bdde)
1 
2 /*
3    This provides a matrix that consists of Mats
4 */
5 
6 #include <petsc-private/matimpl.h>              /*I "petscmat.h" I*/
7 #include <../src/mat/impls/baij/seq/baij.h>    /* use the common AIJ data-structure */
8 
typedef struct {
  SEQAIJHEADER(Mat);                           /* CSR storage (i,j,ilen,imax,nz,...) whose "values" are bs x bs Mat blocks */
  SEQBAIJHEADER;                               /* adds block information: mbs, diag[], ... */
  Mat *diags;                                  /* factored diagonal blocks, built lazily by the SOR routines */

  Vec left,right,middle,workb;                 /* dummy vectors to perform local parts of product */
} Mat_BlockMat;
16 
17 extern PetscErrorCode  MatBlockMatSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatSOR_BlockMat_Symmetric"
21 PetscErrorCode MatSOR_BlockMat_Symmetric(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
22 {
23   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
24   PetscScalar       *x;
25   const Mat         *v;
26   const PetscScalar *b;
27   PetscErrorCode    ierr;
28   PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
29   const PetscInt    *idx;
30   IS                row,col;
31   MatFactorInfo     info;
32   Vec               left = a->left,right = a->right, middle = a->middle;
33   Mat               *diag;
34 
35   PetscFunctionBegin;
36   its = its*lits;
37   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
38   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
39   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
40   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
41   if ((flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) && !(flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP)) {
42     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot do backward sweep without forward sweep");
43   }
44 
45   if (!a->diags) {
46     ierr = PetscMalloc1(mbs,&a->diags);CHKERRQ(ierr);
47     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
48     for (i=0; i<mbs; i++) {
49       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
50       ierr = MatCholeskyFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,&info);CHKERRQ(ierr);
51       ierr = MatCholeskyFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
52       ierr = ISDestroy(&row);CHKERRQ(ierr);
53       ierr = ISDestroy(&col);CHKERRQ(ierr);
54     }
55     ierr = VecDuplicate(bb,&a->workb);CHKERRQ(ierr);
56   }
57   diag = a->diags;
58 
59   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
60   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
61   /* copy right hand side because it must be modified during iteration */
62   ierr = VecCopy(bb,a->workb);CHKERRQ(ierr);
63   ierr = VecGetArrayRead(a->workb,&b);CHKERRQ(ierr);
64 
65   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
66   while (its--) {
67     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {
68 
69       for (i=0; i<mbs; i++) {
70         n   = a->i[i+1] - a->i[i] - 1;
71         idx = a->j + a->i[i] + 1;
72         v   = a->a + a->i[i] + 1;
73 
74         ierr = VecSet(left,0.0);CHKERRQ(ierr);
75         for (j=0; j<n; j++) {
76           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
77           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
78           ierr = VecResetArray(right);CHKERRQ(ierr);
79         }
80         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
81         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
82         ierr = VecResetArray(right);CHKERRQ(ierr);
83 
84         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
85         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
86 
87         /* now adjust right hand side, see MatSOR_SeqSBAIJ */
88         for (j=0; j<n; j++) {
89           ierr = MatMultTranspose(v[j],right,left);CHKERRQ(ierr);
90           ierr = VecPlaceArray(middle,b + idx[j]*bs);CHKERRQ(ierr);
91           ierr = VecAXPY(middle,-1.0,left);CHKERRQ(ierr);
92           ierr = VecResetArray(middle);CHKERRQ(ierr);
93         }
94         ierr = VecResetArray(right);CHKERRQ(ierr);
95 
96       }
97     }
98     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {
99 
100       for (i=mbs-1; i>=0; i--) {
101         n   = a->i[i+1] - a->i[i] - 1;
102         idx = a->j + a->i[i] + 1;
103         v   = a->a + a->i[i] + 1;
104 
105         ierr = VecSet(left,0.0);CHKERRQ(ierr);
106         for (j=0; j<n; j++) {
107           ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
108           ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
109           ierr = VecResetArray(right);CHKERRQ(ierr);
110         }
111         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
112         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
113         ierr = VecResetArray(right);CHKERRQ(ierr);
114 
115         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
116         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
117         ierr = VecResetArray(right);CHKERRQ(ierr);
118 
119       }
120     }
121   }
122   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
123   ierr = VecRestoreArrayRead(a->workb,&b);CHKERRQ(ierr);
124   PetscFunctionReturn(0);
125 }
126 
127 #undef __FUNCT__
128 #define __FUNCT__ "MatSOR_BlockMat"
129 PetscErrorCode MatSOR_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
130 {
131   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
132   PetscScalar       *x;
133   const Mat         *v;
134   const PetscScalar *b;
135   PetscErrorCode    ierr;
136   PetscInt          n = A->cmap->n,i,mbs = n/A->rmap->bs,j,bs = A->rmap->bs;
137   const PetscInt    *idx;
138   IS                row,col;
139   MatFactorInfo     info;
140   Vec               left = a->left,right = a->right;
141   Mat               *diag;
142 
143   PetscFunctionBegin;
144   its = its*lits;
145   if (its <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
146   if (flag & SOR_EISENSTAT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for Eisenstat");
147   if (omega != 1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
148   if (fshift) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet for fshift");
149 
150   if (!a->diags) {
151     ierr = PetscMalloc1(mbs,&a->diags);CHKERRQ(ierr);
152     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
153     for (i=0; i<mbs; i++) {
154       ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERINGND,&row,&col);CHKERRQ(ierr);
155       ierr = MatLUFactorSymbolic(a->diags[i],a->a[a->diag[i]],row,col,&info);CHKERRQ(ierr);
156       ierr = MatLUFactorNumeric(a->diags[i],a->a[a->diag[i]],&info);CHKERRQ(ierr);
157       ierr = ISDestroy(&row);CHKERRQ(ierr);
158       ierr = ISDestroy(&col);CHKERRQ(ierr);
159     }
160   }
161   diag = a->diags;
162 
163   ierr = VecSet(xx,0.0);CHKERRQ(ierr);
164   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
165   ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
166 
167   /* need to add code for when initial guess is zero, see MatSOR_SeqAIJ */
168   while (its--) {
169     if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) {
170 
171       for (i=0; i<mbs; i++) {
172         n   = a->i[i+1] - a->i[i];
173         idx = a->j + a->i[i];
174         v   = a->a + a->i[i];
175 
176         ierr = VecSet(left,0.0);CHKERRQ(ierr);
177         for (j=0; j<n; j++) {
178           if (idx[j] != i) {
179             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
180             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
181             ierr = VecResetArray(right);CHKERRQ(ierr);
182           }
183         }
184         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
185         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
186         ierr = VecResetArray(right);CHKERRQ(ierr);
187 
188         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
189         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
190         ierr = VecResetArray(right);CHKERRQ(ierr);
191       }
192     }
193     if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) {
194 
195       for (i=mbs-1; i>=0; i--) {
196         n   = a->i[i+1] - a->i[i];
197         idx = a->j + a->i[i];
198         v   = a->a + a->i[i];
199 
200         ierr = VecSet(left,0.0);CHKERRQ(ierr);
201         for (j=0; j<n; j++) {
202           if (idx[j] != i) {
203             ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
204             ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
205             ierr = VecResetArray(right);CHKERRQ(ierr);
206           }
207         }
208         ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
209         ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
210         ierr = VecResetArray(right);CHKERRQ(ierr);
211 
212         ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
213         ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
214         ierr = VecResetArray(right);CHKERRQ(ierr);
215 
216       }
217     }
218   }
219   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
220   ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
221   PetscFunctionReturn(0);
222 }
223 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
/*
   Inserts/adds scalar values: each scalar (row,col) is mapped to a block
   (brow,bcol) plus an offset (ridx,cidx) inside the bs x bs block, and the
   value is forwarded with MatSetValues() to the small SeqAIJ matrix stored
   at that block, creating the block matrix on first touch.
*/
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode is)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscInt       *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1;
  PetscInt       *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt       *aj  =a->j,nonew=a->nonew,bs=A->rmap->bs,brow,bcol;
  PetscErrorCode ierr;
  PetscInt       ridx,cidx;
  PetscBool      roworiented=a->roworiented;
  MatScalar      value;
  Mat            *ap,*aa = a->a;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (k=0; k<m; k++) { /* loop over added rows */
    row  = im[k];
    brow = row/bs;                    /* block row containing this scalar row */
    if (row < 0) continue;            /* negative row indices are silently skipped */
#if defined(PETSC_USE_DEBUG)
    if (row >= A->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap->N-1);
#endif
    rp   = aj + ai[brow];             /* block-column indices of block row brow */
    ap   = aa + ai[brow];             /* block (Mat) entries of block row brow */
    rmax = imax[brow];                /* allocated slots in this block row */
    nrow = ailen[brow];               /* slots currently in use */
    low  = 0;
    high = nrow;
    for (l=0; l<n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (in[l] >= A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap->n-1);
#endif
      col = in[l]; bcol = col/bs;
      if (A->symmetric && brow > bcol) continue;    /* symmetric storage keeps only blocks on/above the diagonal */
      ridx = row % bs; cidx = col % bs;             /* position inside the bs x bs block */
      if (roworiented) value = v[l + k*n];
      else value = v[k + l*m];

      /* binary search for bcol among existing block columns; the low/high
         window is reused across iterations when columns arrive sorted */
      if (col <= lastcol) low = 0;
      else high = nrow;
      lastcol = col;
      while (high-low > 7) {
        t = (low+high)/2;
        if (rp[t] > bcol) high = t;
        else              low  = t;
      }
      for (i=low; i<high; i++) {
        if (rp[i] > bcol) break;
        if (rp[i] == bcol) goto noinsert1;   /* block already present at slot i */
      }
      if (nonew == 1) goto noinsert1;        /* new nonzeros disallowed: drop silently */
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col);
      MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat);
      N = nrow++ - 1; high++;
      /* shift up all the later entries in this row */
      for (ii=N; ii>=i; ii--) {
        rp[ii+1] = rp[ii];
        ap[ii+1] = ap[ii];
      }
      if (N>=i) ap[i] = 0;                   /* slot i is now vacant; block created below */
      rp[i] = bcol;
      a->nz++;
noinsert1:;
      /* create the bs x bs SeqAIJ block on first insertion into it */
      if (!*(ap+i)) {
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr);
      }
      ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr);
      low  = i;
    }
    ailen[brow] = nrow;
  }
  A->same_nonzero = PETSC_FALSE;
  PetscFunctionReturn(0);
}
300 
301 #undef __FUNCT__
302 #define __FUNCT__ "MatLoad_BlockMat"
303 PetscErrorCode MatLoad_BlockMat(Mat newmat, PetscViewer viewer)
304 {
305   PetscErrorCode    ierr;
306   Mat               tmpA;
307   PetscInt          i,j,m,n,bs = 1,ncols,*lens,currentcol,mbs,**ii,*ilens,nextcol,*llens,cnt = 0;
308   const PetscInt    *cols;
309   const PetscScalar *values;
310   PetscBool         flg = PETSC_FALSE,notdone;
311   Mat_SeqAIJ        *a;
312   Mat_BlockMat      *amat;
313 
314   PetscFunctionBegin;
315   ierr = MatCreate(PETSC_COMM_SELF,&tmpA);CHKERRQ(ierr);
316   ierr = MatSetType(tmpA,MATSEQAIJ);CHKERRQ(ierr);
317   ierr = MatLoad_SeqAIJ(tmpA,viewer);CHKERRQ(ierr);
318 
319   ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr);
320   ierr = PetscOptionsBegin(PETSC_COMM_SELF,NULL,"Options for loading BlockMat matrix 1","Mat");CHKERRQ(ierr);
321   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
322   ierr = PetscOptionsBool("-matload_symmetric","Store the matrix as symmetric","MatLoad",flg,&flg,NULL);CHKERRQ(ierr);
323   ierr = PetscOptionsEnd();CHKERRQ(ierr);
324 
325   /* Determine number of nonzero blocks for each block row */
326   a    = (Mat_SeqAIJ*) tmpA->data;
327   mbs  = m/bs;
328   ierr = PetscMalloc3(mbs,&lens,bs,&ii,bs,&ilens);CHKERRQ(ierr);
329   ierr = PetscMemzero(lens,mbs*sizeof(PetscInt));CHKERRQ(ierr);
330 
331   for (i=0; i<mbs; i++) {
332     for (j=0; j<bs; j++) {
333       ii[j]    = a->j + a->i[i*bs + j];
334       ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
335     }
336 
337     currentcol = -1;
338     notdone    = PETSC_TRUE;
339     while (PETSC_TRUE) {
340       notdone = PETSC_FALSE;
341       nextcol = 1000000000;
342       for (j=0; j<bs; j++) {
343         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) {
344           ii[j]++;
345           ilens[j]--;
346         }
347         if (ilens[j] > 0) {
348           notdone = PETSC_TRUE;
349           nextcol = PetscMin(nextcol,ii[j][0]/bs);
350         }
351       }
352       if (!notdone) break;
353       if (!flg || (nextcol >= i)) lens[i]++;
354       currentcol = nextcol;
355     }
356   }
357 
358   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) {
359     ierr = MatSetSizes(newmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
360   }
361   ierr = MatBlockMatSetPreallocation(newmat,bs,0,lens);CHKERRQ(ierr);
362   if (flg) {
363     ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
364   }
365   amat = (Mat_BlockMat*)(newmat)->data;
366 
367   /* preallocate the submatrices */
368   ierr = PetscMalloc1(bs,&llens);CHKERRQ(ierr);
369   for (i=0; i<mbs; i++) { /* loops for block rows */
370     for (j=0; j<bs; j++) {
371       ii[j]    = a->j + a->i[i*bs + j];
372       ilens[j] = a->i[i*bs + j + 1] - a->i[i*bs + j];
373     }
374 
375     currentcol = 1000000000;
376     for (j=0; j<bs; j++) { /* loop over rows in block finding first nonzero block */
377       if (ilens[j] > 0) {
378         currentcol = PetscMin(currentcol,ii[j][0]/bs);
379       }
380     }
381 
382     notdone = PETSC_TRUE;
383     while (PETSC_TRUE) {  /* loops over blocks in block row */
384 
385       notdone = PETSC_FALSE;
386       nextcol = 1000000000;
387       ierr    = PetscMemzero(llens,bs*sizeof(PetscInt));CHKERRQ(ierr);
388       for (j=0; j<bs; j++) { /* loop over rows in block */
389         while ((ilens[j] > 0 && ii[j][0]/bs <= currentcol)) { /* loop over columns in row */
390           ii[j]++;
391           ilens[j]--;
392           llens[j]++;
393         }
394         if (ilens[j] > 0) {
395           notdone = PETSC_TRUE;
396           nextcol = PetscMin(nextcol,ii[j][0]/bs);
397         }
398       }
399       if (cnt >= amat->maxnz) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of blocks found greater than expected %D",cnt);
400       if (!flg || currentcol >= i) {
401         amat->j[cnt] = currentcol;
402         ierr         = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,llens,amat->a+cnt++);CHKERRQ(ierr);
403       }
404 
405       if (!notdone) break;
406       currentcol = nextcol;
407     }
408     amat->ilen[i] = lens[i];
409   }
410 
411   ierr = PetscFree3(lens,ii,ilens);CHKERRQ(ierr);
412   ierr = PetscFree(llens);CHKERRQ(ierr);
413 
414   /* copy over the matrix, one row at a time */
415   for (i=0; i<m; i++) {
416     ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
417     ierr = MatSetValues(newmat,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr);
418     ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr);
419   }
420   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
421   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
422   PetscFunctionReturn(0);
423 }
424 
425 #undef __FUNCT__
426 #define __FUNCT__ "MatView_BlockMat"
427 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer)
428 {
429   Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
430   PetscErrorCode    ierr;
431   const char        *name;
432   PetscViewerFormat format;
433 
434   PetscFunctionBegin;
435   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
436   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
437   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) {
438     ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr);
439     if (A->symmetric) {
440       ierr = PetscViewerASCIIPrintf(viewer,"Only upper triangular part of symmetric matrix is stored\n");CHKERRQ(ierr);
441     }
442   }
443   PetscFunctionReturn(0);
444 }
445 
446 #undef __FUNCT__
447 #define __FUNCT__ "MatDestroy_BlockMat"
448 PetscErrorCode MatDestroy_BlockMat(Mat mat)
449 {
450   PetscErrorCode ierr;
451   Mat_BlockMat   *bmat = (Mat_BlockMat*)mat->data;
452   PetscInt       i;
453 
454   PetscFunctionBegin;
455   ierr = VecDestroy(&bmat->right);CHKERRQ(ierr);
456   ierr = VecDestroy(&bmat->left);CHKERRQ(ierr);
457   ierr = VecDestroy(&bmat->middle);CHKERRQ(ierr);
458   ierr = VecDestroy(&bmat->workb);CHKERRQ(ierr);
459   if (bmat->diags) {
460     for (i=0; i<mat->rmap->n/mat->rmap->bs; i++) {
461       ierr = MatDestroy(&bmat->diags[i]);CHKERRQ(ierr);
462     }
463   }
464   if (bmat->a) {
465     for (i=0; i<bmat->nz; i++) {
466       ierr = MatDestroy(&bmat->a[i]);CHKERRQ(ierr);
467     }
468   }
469   ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
470   ierr = PetscFree(mat->data);CHKERRQ(ierr);
471   PetscFunctionReturn(0);
472 }
473 
474 #undef __FUNCT__
475 #define __FUNCT__ "MatMult_BlockMat"
476 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y)
477 {
478   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
479   PetscErrorCode ierr;
480   PetscScalar    *xx,*yy;
481   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
482   Mat            *aa;
483 
484   PetscFunctionBegin;
485   /*
486      Standard CSR multiply except each entry is a Mat
487   */
488   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
489 
490   ierr = VecSet(y,0.0);CHKERRQ(ierr);
491   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
492   aj   = bmat->j;
493   aa   = bmat->a;
494   ii   = bmat->i;
495   for (i=0; i<m; i++) {
496     jrow = ii[i];
497     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
498     n    = ii[i+1] - jrow;
499     for (j=0; j<n; j++) {
500       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
501       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
502       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
503       jrow++;
504     }
505     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
506   }
507   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
508   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
509   PetscFunctionReturn(0);
510 }
511 
512 #undef __FUNCT__
513 #define __FUNCT__ "MatMult_BlockMat_Symmetric"
514 PetscErrorCode MatMult_BlockMat_Symmetric(Mat A,Vec x,Vec y)
515 {
516   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
517   PetscErrorCode ierr;
518   PetscScalar    *xx,*yy;
519   PetscInt       *aj,i,*ii,jrow,m = A->rmap->n/A->rmap->bs,bs = A->rmap->bs,n,j;
520   Mat            *aa;
521 
522   PetscFunctionBegin;
523   /*
524      Standard CSR multiply except each entry is a Mat
525   */
526   ierr = VecGetArray(x,&xx);CHKERRQ(ierr);
527 
528   ierr = VecSet(y,0.0);CHKERRQ(ierr);
529   ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
530   aj   = bmat->j;
531   aa   = bmat->a;
532   ii   = bmat->i;
533   for (i=0; i<m; i++) {
534     jrow = ii[i];
535     n    = ii[i+1] - jrow;
536     ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
537     ierr = VecPlaceArray(bmat->middle,xx + bs*i);CHKERRQ(ierr);
538     /* if we ALWAYS required a diagonal entry then could remove this if test */
539     if (aj[jrow] == i) {
540       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
541       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
542       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
543       jrow++;
544       n--;
545     }
546     for (j=0; j<n; j++) {
547       ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);            /* upper triangular part */
548       ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
549       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
550 
551       ierr = VecPlaceArray(bmat->right,yy + bs*aj[jrow]);CHKERRQ(ierr);            /* lower triangular part */
552       ierr = MatMultTransposeAdd(aa[jrow],bmat->middle,bmat->right,bmat->right);CHKERRQ(ierr);
553       ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
554       jrow++;
555     }
556     ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
557     ierr = VecResetArray(bmat->middle);CHKERRQ(ierr);
558   }
559   ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
560   ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
561   PetscFunctionReturn(0);
562 }
563 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/* Unimplemented stub: performs no work and returns success, so z is left
   untouched rather than set to A*x + y.
   NOTE(review): callers see success even though nothing was computed --
   confirm this placeholder is intentional. */
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
571 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/* Unimplemented stub: performs no work and returns success, so y is left
   untouched rather than set to A^T*x.
   NOTE(review): silent no-op -- confirm this placeholder is intentional. */
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
579 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/* Unimplemented stub: performs no work and returns success, so z is left
   untouched rather than set to A^T*x + y.
   NOTE(review): silent no-op -- confirm this placeholder is intentional. */
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
587 
588 /*
589      Adds diagonal pointers to sparse matrix structure.
590 */
591 #undef __FUNCT__
592 #define __FUNCT__ "MatMarkDiagonal_BlockMat"
593 PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
594 {
595   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
596   PetscErrorCode ierr;
597   PetscInt       i,j,mbs = A->rmap->n/A->rmap->bs;
598 
599   PetscFunctionBegin;
600   if (!a->diag) {
601     ierr = PetscMalloc1(mbs,&a->diag);CHKERRQ(ierr);
602   }
603   for (i=0; i<mbs; i++) {
604     a->diag[i] = a->i[i+1];
605     for (j=a->i[i]; j<a->i[i+1]; j++) {
606       if (a->j[j] == i) {
607         a->diag[i] = j;
608         break;
609       }
610     }
611   }
612   PetscFunctionReturn(0);
613 }
614 
615 #undef __FUNCT__
616 #define __FUNCT__ "MatGetSubMatrix_BlockMat"
617 PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B)
618 {
619   Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
620   Mat_SeqAIJ     *c;
621   PetscErrorCode ierr;
622   PetscInt       i,k,first,step,lensi,nrows,ncols;
623   PetscInt       *j_new,*i_new,*aj = a->j,*ailen = a->ilen;
624   PetscScalar    *a_new;
625   Mat            C,*aa = a->a;
626   PetscBool      stride,equal;
627 
628   PetscFunctionBegin;
629   ierr = ISEqual(isrow,iscol,&equal);CHKERRQ(ierr);
630   if (!equal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for idential column and row indices");
631   ierr = PetscObjectTypeCompare((PetscObject)iscol,ISSTRIDE,&stride);CHKERRQ(ierr);
632   if (!stride) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only for stride indices");
633   ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr);
634   if (step != A->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only select one entry from each block");
635 
636   ierr  = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr);
637   ncols = nrows;
638 
639   /* create submatrix */
640   if (scall == MAT_REUSE_MATRIX) {
641     PetscInt n_cols,n_rows;
642     C    = *B;
643     ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr);
644     if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size");
645     ierr = MatZeroEntries(C);CHKERRQ(ierr);
646   } else {
647     ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr);
648     ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
649     if (A->symmetric) {
650       ierr = MatSetType(C,MATSEQSBAIJ);CHKERRQ(ierr);
651     } else {
652       ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr);
653     }
654     ierr = MatSeqAIJSetPreallocation(C,0,ailen);CHKERRQ(ierr);
655     ierr = MatSeqSBAIJSetPreallocation(C,1,0,ailen);CHKERRQ(ierr);
656   }
657   c = (Mat_SeqAIJ*)C->data;
658 
659   /* loop over rows inserting into submatrix */
660   a_new = c->a;
661   j_new = c->j;
662   i_new = c->i;
663 
664   for (i=0; i<nrows; i++) {
665     lensi = ailen[i];
666     for (k=0; k<lensi; k++) {
667       *j_new++ = *aj++;
668       ierr     = MatGetValue(*aa++,first,first,a_new++);CHKERRQ(ierr);
669     }
670     i_new[i+1] = i_new[i] + lensi;
671     c->ilen[i] = lensi;
672   }
673 
674   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
675   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
676   *B   = C;
677   PetscFunctionReturn(0);
678 }
679 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_BlockMat"
/*
   Final assembly: compresses the CSR block storage in place (squeezing out
   the unused preallocated slots of every block row), assembles each block
   submatrix, logs storage statistics, and records the diagonal block
   locations via MatMarkDiagonal_BlockMat().
*/
PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax;
  PetscInt       m      = a->mbs,*ip,N,*ailen = a->ilen,rmax = 0;
  Mat            *aa    = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);   /* compression only happens at final assembly */

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax    = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];    /* row start after compression */
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]   = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* after compression the blocks are contiguous: assemble each one */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap->n/A->cmap->bs,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);

  A->info.mallocs    += a->reallocs;
  a->reallocs         = 0;
  A->info.nz_unneeded = (double)fshift;
  a->rmax             = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr            = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
738 
739 #undef __FUNCT__
740 #define __FUNCT__ "MatSetOption_BlockMat"
741 PetscErrorCode MatSetOption_BlockMat(Mat A,MatOption opt,PetscBool flg)
742 {
743   PetscFunctionBegin;
744   if (opt == MAT_SYMMETRIC && flg) {
745     A->ops->sor  = MatSOR_BlockMat_Symmetric;
746     A->ops->mult = MatMult_BlockMat_Symmetric;
747   } else {
748     PetscInfo1(A,"Unused matrix option %s\n",MatOptions[opt]);
749   }
750   PetscFunctionReturn(0);
751 }
752 
753 
754 static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
755                                        0,
756                                        0,
757                                        MatMult_BlockMat,
758                                /*  4*/ MatMultAdd_BlockMat,
759                                        MatMultTranspose_BlockMat,
760                                        MatMultTransposeAdd_BlockMat,
761                                        0,
762                                        0,
763                                        0,
764                                /* 10*/ 0,
765                                        0,
766                                        0,
767                                        MatSOR_BlockMat,
768                                        0,
769                                /* 15*/ 0,
770                                        0,
771                                        0,
772                                        0,
773                                        0,
774                                /* 20*/ 0,
775                                        MatAssemblyEnd_BlockMat,
776                                        MatSetOption_BlockMat,
777                                        0,
778                                /* 24*/ 0,
779                                        0,
780                                        0,
781                                        0,
782                                        0,
783                                /* 29*/ 0,
784                                        0,
785                                        0,
786                                        0,
787                                        0,
788                                /* 34*/ 0,
789                                        0,
790                                        0,
791                                        0,
792                                        0,
793                                /* 39*/ 0,
794                                        0,
795                                        0,
796                                        0,
797                                        0,
798                                /* 44*/ 0,
799                                        0,
800                                        0,
801                                        0,
802                                        0,
803                                /* 49*/ 0,
804                                        0,
805                                        0,
806                                        0,
807                                        0,
808                                /* 54*/ 0,
809                                        0,
810                                        0,
811                                        0,
812                                        0,
813                                /* 59*/ MatGetSubMatrix_BlockMat,
814                                        MatDestroy_BlockMat,
815                                        MatView_BlockMat,
816                                        0,
817                                        0,
818                                /* 64*/ 0,
819                                        0,
820                                        0,
821                                        0,
822                                        0,
823                                /* 69*/ 0,
824                                        0,
825                                        0,
826                                        0,
827                                        0,
828                                /* 74*/ 0,
829                                        0,
830                                        0,
831                                        0,
832                                        0,
833                                /* 79*/ 0,
834                                        0,
835                                        0,
836                                        0,
837                                        MatLoad_BlockMat,
838                                /* 84*/ 0,
839                                        0,
840                                        0,
841                                        0,
842                                        0,
843                                /* 89*/ 0,
844                                        0,
845                                        0,
846                                        0,
847                                        0,
848                                /* 94*/ 0,
849                                        0,
850                                        0,
851                                        0,
852                                        0,
853                                /* 99*/ 0,
854                                        0,
855                                        0,
856                                        0,
857                                        0,
858                                /*104*/ 0,
859                                        0,
860                                        0,
861                                        0,
862                                        0,
863                                /*109*/ 0,
864                                        0,
865                                        0,
866                                        0,
867                                        0,
868                                /*114*/ 0,
869                                        0,
870                                        0,
871                                        0,
872                                        0,
873                                /*119*/ 0,
874                                        0,
875                                        0,
876                                        0,
877                                        0,
878                                /*124*/ 0,
879                                        0,
880                                        0,
881                                        0,
882                                        0,
883                                /*129*/ 0,
884                                        0,
885                                        0,
886                                        0,
887                                        0,
888                                /*134*/ 0,
889                                        0,
890                                        0,
891                                        0,
892                                        0,
893                                /*139*/ 0,
894                                        0,
895                                        0
896 };
897 
#undef __FUNCT__
#define __FUNCT__ "MatBlockMatSetPreallocation"
/*@C
   MatBlockMatSetPreallocation - For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameter nz
   (or the array nnz).  By setting these parameters accurately, performance
   during matrix assembly can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - The matrix
.  bs - size of each block in matrix
.  nz - number of nonzero blocks per block row (same for all block rows)
-  nnz - array containing the number of nonzero blocks in the various block rows
         (possibly different for each block row) or NULL

   Notes:
     If nnz is given then nz is ignored

   Specify the preallocated storage with either nz or nnz (not both).
   Set nz=PETSC_DEFAULT and nnz=NULL for PETSc to control dynamic memory
   allocation.  For large problems you MUST preallocate memory or you
   will get TERRIBLE performance, see the users' manual chapter on matrices.

   Level: intermediate

.seealso: MatCreate(), MatCreateBlockMat(), MatSetValues()

@*/
PetscErrorCode  MatBlockMatSetPreallocation(Mat B,PetscInt bs,PetscInt nz,const PetscInt nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Dispatch to the type-specific routine composed on B (for MATBLOCKMAT this is
     MatBlockMatSetPreallocation_BlockMat); PetscTryMethod silently does nothing
     if B is of a type that has not composed "MatBlockMatSetPreallocation_C". */
  ierr = PetscTryMethod(B,"MatBlockMatSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[]),(B,bs,nz,nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
936 
937 #undef __FUNCT__
938 #define __FUNCT__ "MatBlockMatSetPreallocation_BlockMat"
939 PetscErrorCode  MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
940 {
941   Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
942   PetscErrorCode ierr;
943   PetscInt       i;
944 
945   PetscFunctionBegin;
946   ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr);
947   ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr);
948   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
949   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
950   ierr = PetscLayoutGetBlockSize(A->rmap,&bs);CHKERRQ(ierr);
951 
952   if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
953   if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
954   if (nnz) {
955     for (i=0; i<A->rmap->n/bs; i++) {
956       if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
957       if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
958     }
959   }
960   bmat->mbs = A->rmap->n/bs;
961 
962   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->right);CHKERRQ(ierr);
963   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->middle);CHKERRQ(ierr);
964   ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
965 
966   if (!bmat->imax) {
967     ierr = PetscMalloc2(A->rmap->n,&bmat->imax,A->rmap->n,&bmat->ilen);CHKERRQ(ierr);
968     ierr = PetscLogObjectMemory((PetscObject)A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
969   }
970   if (nnz) {
971     nz = 0;
972     for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
973       bmat->imax[i] = nnz[i];
974       nz           += nnz[i];
975     }
976   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
977 
978   /* bmat->ilen will count nonzeros in each row so far. */
979   for (i=0; i<bmat->mbs; i++) bmat->ilen[i] = 0;
980 
981   /* allocate the matrix space */
982   ierr       = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
983   ierr       = PetscMalloc3(nz,&bmat->a,nz,&bmat->j,A->rmap->n+1,&bmat->i);CHKERRQ(ierr);
984   ierr       = PetscLogObjectMemory((PetscObject)A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
985   bmat->i[0] = 0;
986   for (i=1; i<bmat->mbs+1; i++) {
987     bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
988   }
989   bmat->singlemalloc = PETSC_TRUE;
990   bmat->free_a       = PETSC_TRUE;
991   bmat->free_ij      = PETSC_TRUE;
992 
993   bmat->nz            = 0;
994   bmat->maxnz         = nz;
995   A->info.nz_unneeded = (double)bmat->maxnz;
996   ierr                = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
997   PetscFunctionReturn(0);
998 }
999 
/*MC
   MATBLOCKMAT - A matrix that is defined by a set of Mat objects, together representing a
                 sparse block matrix whose individual blocks are (usually) themselves sparse.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/
1009 
1010 #undef __FUNCT__
1011 #define __FUNCT__ "MatCreate_BlockMat"
1012 PETSC_EXTERN PetscErrorCode MatCreate_BlockMat(Mat A)
1013 {
1014   Mat_BlockMat   *b;
1015   PetscErrorCode ierr;
1016 
1017   PetscFunctionBegin;
1018   ierr    = PetscNewLog(A,Mat_BlockMat,&b);CHKERRQ(ierr);
1019   A->data = (void*)b;
1020   ierr    = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1021 
1022   A->assembled    = PETSC_TRUE;
1023   A->preallocated = PETSC_FALSE;
1024   ierr            = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
1025 
1026   ierr = PetscObjectComposeFunction((PetscObject)A,"MatBlockMatSetPreallocation_C",MatBlockMatSetPreallocation_BlockMat);CHKERRQ(ierr);
1027   PetscFunctionReturn(0);
1028 }
1029 
#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix based on sparse Mat storage

  Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n  - number of columns
.  bs - size of each submatrix
.  nz  - expected maximum number of nonzero blocks in row (use PETSC_DEFAULT if not known)
-  nnz - expected number of nonzero blocks per block row if known (use NULL otherwise)


   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y. Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode  MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,PetscInt nz,PetscInt *nnz, Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* standard PETSc create/set-sizes/set-type/preallocate sequence */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatBlockMatSetPreallocation(*A,bs,nz,nnz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1076 
1077 
1078 
1079