xref: /petsc/src/mat/impls/sbaij/mpi/mpisbaij.c (revision 3e7ff0edd3573be01c8c0fa32db97c3db8fa5c8d)
1 
2 #include <../src/mat/impls/baij/mpi/mpibaij.h>    /*I "petscmat.h" I*/
3 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
4 #include <../src/mat/impls/sbaij/seq/sbaij.h>
5 #include <petscblaslapack.h>
6 
7 extern PetscErrorCode MatSetUpMultiply_MPISBAIJ(Mat);
8 extern PetscErrorCode MatSetUpMultiply_MPISBAIJ_2comm(Mat);
9 extern PetscErrorCode MatDisAssemble_MPISBAIJ(Mat);
10 extern PetscErrorCode MatIncreaseOverlap_MPISBAIJ(Mat,PetscInt,IS[],PetscInt);
11 extern PetscErrorCode MatGetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar []);
12 extern PetscErrorCode MatGetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar []);
13 extern PetscErrorCode MatSetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt [],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
14 extern PetscErrorCode MatSetValuesBlocked_SeqSBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
15 extern PetscErrorCode MatSetValuesBlocked_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
16 extern PetscErrorCode MatGetRow_SeqSBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
17 extern PetscErrorCode MatRestoreRow_SeqSBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
18 extern PetscErrorCode MatZeroRows_SeqSBAIJ(Mat,IS,PetscScalar*,Vec,Vec);
19 extern PetscErrorCode MatZeroRows_SeqBAIJ(Mat,IS,PetscScalar*,Vec,Vec);
20 extern PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat,Vec,PetscInt[]);
21 extern PetscErrorCode MatSOR_MPISBAIJ(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
22 
23 #undef __FUNCT__
24 #define __FUNCT__ "MatStoreValues_MPISBAIJ"
25 PetscErrorCode  MatStoreValues_MPISBAIJ(Mat mat)
26 {
27   Mat_MPISBAIJ   *aij = (Mat_MPISBAIJ*)mat->data;
28   PetscErrorCode ierr;
29 
30   PetscFunctionBegin;
31   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
32   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
33   PetscFunctionReturn(0);
34 }
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "MatRetrieveValues_MPISBAIJ"
38 PetscErrorCode  MatRetrieveValues_MPISBAIJ(Mat mat)
39 {
40   Mat_MPISBAIJ   *aij = (Mat_MPISBAIJ*)mat->data;
41   PetscErrorCode ierr;
42 
43   PetscFunctionBegin;
44   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
45   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
46   PetscFunctionReturn(0);
47 }
48 
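/*
   The two macros below insert or add a single scalar entry "value" at local point
   indices (row,col): MatSetValues_SeqSBAIJ_A_Private targets the diagonal block A
   (SeqSBAIJ) and MatSetValues_SeqSBAIJ_B_Private the off-diagonal block B (SeqBAIJ).
   Each locates the bs x bs block containing the entry with a short binary search
   followed by a linear scan of the block row; if the block is missing and new nonzeros
   are allowed (a->nonew / b->nonew), the row is enlarged with MatSeqXAIJReallocateAIJ()
   and the new block is zeroed before the entry is written.  They rely on the local
   variables (rp, ap, nrow, rmax, bs2, ...) declared in MatSetValues_MPISBAIJ() below.
*/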
49 #define  MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv) \
50   { \
51  \
52     brow = row/bs;  \
53     rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
54     rmax = aimax[brow]; nrow = ailen[brow]; \
55     bcol = col/bs; \
56     ridx = row % bs; cidx = col % bs; \
57     low  = 0; high = nrow; \
58     while (high-low > 3) { \
59       t = (low+high)/2; \
60       if (rp[t] > bcol) high = t; \
61       else              low  = t; \
62     } \
63     for (_i=low; _i<high; _i++) { \
64       if (rp[_i] > bcol) break; \
65       if (rp[_i] == bcol) { \
66         bap = ap + bs2*_i + bs*cidx + ridx; \
67         if (addv == ADD_VALUES) *bap += value;  \
68         else                    *bap  = value;  \
69         goto a_noinsert; \
70       } \
71     } \
72     if (a->nonew == 1) goto a_noinsert; \
73     if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
74     MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
75     N = nrow++ - 1;  \
76     /* shift up all the later entries in this row */ \
77     for (ii=N; ii>=_i; ii--) { \
78       rp[ii+1] = rp[ii]; \
79       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
80     } \
81     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
82     rp[_i]                      = bcol;  \
83     ap[bs2*_i + bs*cidx + ridx] = value;  \
84     A->nonzerostate++;\
85 a_noinsert:; \
86     ailen[brow] = nrow; \
87   }
88 
89 #define  MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv) \
90   { \
91     brow = row/bs;  \
92     rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
93     rmax = bimax[brow]; nrow = bilen[brow]; \
94     bcol = col/bs; \
95     ridx = row % bs; cidx = col % bs; \
96     low  = 0; high = nrow; \
97     while (high-low > 3) { \
98       t = (low+high)/2; \
99       if (rp[t] > bcol) high = t; \
100       else              low  = t; \
101     } \
102     for (_i=low; _i<high; _i++) { \
103       if (rp[_i] > bcol) break; \
104       if (rp[_i] == bcol) { \
105         bap = ap + bs2*_i + bs*cidx + ridx; \
106         if (addv == ADD_VALUES) *bap += value;  \
107         else                    *bap  = value;  \
108         goto b_noinsert; \
109       } \
110     } \
111     if (b->nonew == 1) goto b_noinsert; \
112     if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
113     MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
114     N = nrow++ - 1;  \
115     /* shift up all the later entries in this row */ \
116     for (ii=N; ii>=_i; ii--) { \
117       rp[ii+1] = rp[ii]; \
118       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
119     } \
120     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
121     rp[_i]                      = bcol;  \
122     ap[bs2*_i + bs*cidx + ridx] = value;  \
123     B->nonzerostate++;\
124 b_noinsert:; \
125     bilen[brow] = nrow; \
126   }
127 
128 /* Only add/insert a(i,j) with i<=j (blocks).
129    Any a(i,j) with i>j input by the user is ignored.
130 */
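/* Entries whose global row lies in the local ownership range are inserted directly
   into the diagonal block A (upper-triangular blocks only) or the off-diagonal block B
   using the macros above; entries for rows owned by other processes are collected in
   mat->stash and communicated during MatAssemblyBegin/End(), unless
   MAT_IGNORE_OFF_PROC_ENTRIES was set. */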
131 #undef __FUNCT__
132 #define __FUNCT__ "MatSetValues_MPISBAIJ"
133 PetscErrorCode MatSetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
134 {
135   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
136   MatScalar      value;
137   PetscBool      roworiented = baij->roworiented;
138   PetscErrorCode ierr;
139   PetscInt       i,j,row,col;
140   PetscInt       rstart_orig=mat->rmap->rstart;
141   PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
142   PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;
143 
144   /* Some variables required by the MatSetValues_SeqSBAIJ_{A,B}_Private macros above */
145   Mat          A     = baij->A;
146   Mat_SeqSBAIJ *a    = (Mat_SeqSBAIJ*)(A)->data;
147   PetscInt     *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
148   MatScalar    *aa   =a->a;
149 
150   Mat         B     = baij->B;
151   Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
152   PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
153   MatScalar   *ba   =b->a;
154 
155   PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
156   PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
157   MatScalar *ap,*bap;
158 
159   /* for stash */
160   PetscInt  n_loc, *in_loc = NULL;
161   MatScalar *v_loc = NULL;
162 
163   PetscFunctionBegin;
164   if (!baij->donotstash) {
165     if (n > baij->n_loc) {
166       ierr = PetscFree(baij->in_loc);CHKERRQ(ierr);
167       ierr = PetscFree(baij->v_loc);CHKERRQ(ierr);
168       ierr = PetscMalloc1(n,&baij->in_loc);CHKERRQ(ierr);
169       ierr = PetscMalloc1(n,&baij->v_loc);CHKERRQ(ierr);
170 
171       baij->n_loc = n;
172     }
173     in_loc = baij->in_loc;
174     v_loc  = baij->v_loc;
175   }
176 
177   for (i=0; i<m; i++) {
178     if (im[i] < 0) continue;
179 #if defined(PETSC_USE_DEBUG)
180     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
181 #endif
182     if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
183       row = im[i] - rstart_orig;              /* local row index */
184       for (j=0; j<n; j++) {
185         if (im[i]/bs > in[j]/bs) {
186           if (a->ignore_ltriangular) {
187             continue;    /* ignore lower triangular blocks */
188           } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
189         }
190         if (in[j] >= cstart_orig && in[j] < cend_orig) {  /* diag entry (A) */
191           col  = in[j] - cstart_orig;         /* local col index */
192           brow = row/bs; bcol = col/bs;
193           if (brow > bcol) continue;  /* ignore lower triangular blocks of A */
194           if (roworiented) value = v[i*n+j];
195           else             value = v[i+j*m];
196           MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv);
197           /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
198         } else if (in[j] < 0) continue;
199 #if defined(PETSC_USE_DEBUG)
200         else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
201 #endif
202         else {  /* off-diag entry (B) */
203           if (mat->was_assembled) {
204             if (!baij->colmap) {
205               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
206             }
207 #if defined(PETSC_USE_CTABLE)
208             ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
209             col  = col - 1;
210 #else
211             col = baij->colmap[in[j]/bs] - 1;
212 #endif
213             if (col < 0 && !((Mat_SeqSBAIJ*)(baij->A->data))->nonew) {
214               ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
215               col  =  in[j];
216               /* Reinitialize the variables required by MatSetValues_SeqSBAIJ_B_Private() */
217               B    = baij->B;
218               b    = (Mat_SeqBAIJ*)(B)->data;
219               bimax= b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
220               ba   = b->a;
221             } else col += in[j]%bs;
222           } else col = in[j];
223           if (roworiented) value = v[i*n+j];
224           else             value = v[i+j*m];
225           MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv);
226           /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
227         }
228       }
229     } else {  /* off processor entry */
230       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
231       if (!baij->donotstash) {
232         mat->assembled = PETSC_FALSE;
233         n_loc          = 0;
234         for (j=0; j<n; j++) {
235           if (im[i]/bs > in[j]/bs) continue; /* ignore lower triangular blocks */
236           in_loc[n_loc] = in[j];
237           if (roworiented) {
238             v_loc[n_loc] = v[i*n+j];
239           } else {
240             v_loc[n_loc] = v[j*m+i];
241           }
242           n_loc++;
243         }
244         ierr = MatStashValuesRow_Private(&mat->stash,im[i],n_loc,in_loc,v_loc,PETSC_FALSE);CHKERRQ(ierr);
245       }
246     }
247   }
248   PetscFunctionReturn(0);
249 }
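/*
   Typical user-level calling sequence that exercises the routine above (a minimal
   sketch for orientation only, not code from this file; M, bs, d_nz, o_nz, i, j and v
   are placeholders):

     ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
     ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,M);CHKERRQ(ierr);
     ierr = MatSetType(A,MATMPISBAIJ);CHKERRQ(ierr);
     ierr = MatMPISBAIJSetPreallocation(A,bs,d_nz,NULL,o_nz,NULL);CHKERRQ(ierr);
     ierr = MatSetValues(A,1,&i,1,&j,&v,ADD_VALUES);CHKERRQ(ierr);   (only i <= j is stored)
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/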
250 
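/* Block variant of MatSetValues(): im[]/in[] are block indices and v[] holds bs x bs
   blocks.  Locally owned block rows go to MatSetValuesBlocked_SeqSBAIJ() (diagonal
   part, upper-triangular blocks only) or MatSetValuesBlocked_SeqBAIJ() (off-diagonal
   part); off-process block rows are stashed in mat->bstash for assembly. */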
251 #undef __FUNCT__
252 #define __FUNCT__ "MatSetValuesBlocked_MPISBAIJ"
253 PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const MatScalar v[],InsertMode addv)
254 {
255   Mat_MPISBAIJ    *baij = (Mat_MPISBAIJ*)mat->data;
256   const MatScalar *value;
257   MatScalar       *barray     =baij->barray;
258   PetscBool       roworiented = baij->roworiented,ignore_ltriangular = ((Mat_SeqSBAIJ*)baij->A->data)->ignore_ltriangular;
259   PetscErrorCode  ierr;
260   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
261   PetscInt        rend=baij->rendbs,cstart=baij->rstartbs,stepval;
262   PetscInt        cend=baij->rendbs,bs=mat->rmap->bs,bs2=baij->bs2;
263 
264   PetscFunctionBegin;
265   if (!barray) {
266     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
267     baij->barray = barray;
268   }
269 
270   if (roworiented) {
271     stepval = (n-1)*bs;
272   } else {
273     stepval = (m-1)*bs;
274   }
275   for (i=0; i<m; i++) {
276     if (im[i] < 0) continue;
277 #if defined(PETSC_USE_DEBUG)
278     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
279 #endif
280     if (im[i] >= rstart && im[i] < rend) {
281       row = im[i] - rstart;
282       for (j=0; j<n; j++) {
283         if (im[i] > in[j]) {
284           if (ignore_ltriangular) continue; /* ignore lower triangular blocks */
285           else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
286         }
287         /* If only one block is being set (n == 1 row-oriented, or m == 1 column-oriented) a copy is not required */
288         if ((roworiented) && (n == 1)) {
289           barray = (MatScalar*) v + i*bs2;
290         } else if ((!roworiented) && (m == 1)) {
291           barray = (MatScalar*) v + j*bs2;
292         } else { /* Here a copy is required */
293           if (roworiented) {
294             value = v + i*(stepval+bs)*bs + j*bs;
295           } else {
296             value = v + j*(stepval+bs)*bs + i*bs;
297           }
298           for (ii=0; ii<bs; ii++,value+=stepval) {
299             for (jj=0; jj<bs; jj++) {
300               *barray++ = *value++;
301             }
302           }
303           barray -=bs2;
304         }
305 
306         if (in[j] >= cstart && in[j] < cend) {
307           col  = in[j] - cstart;
308           ierr = MatSetValuesBlocked_SeqSBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
309         } else if (in[j] < 0) continue;
310 #if defined(PETSC_USE_DEBUG)
311         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
312 #endif
313         else {
314           if (mat->was_assembled) {
315             if (!baij->colmap) {
316               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
317             }
318 
319 #if defined(PETSC_USE_DEBUG)
320 #if defined(PETSC_USE_CTABLE)
321             { PetscInt data;
322               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
323               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
324             }
325 #else
326             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
327 #endif
328 #endif
329 #if defined(PETSC_USE_CTABLE)
330             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
331             col  = (col - 1)/bs;
332 #else
333             col = (baij->colmap[in[j]] - 1)/bs;
334 #endif
335             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
336               ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
337               col  = in[j];
338             }
339           } else col = in[j];
340           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
341         }
342       }
343     } else {
344       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
345       if (!baij->donotstash) {
346         if (roworiented) {
347           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
348         } else {
349           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
350         }
351       }
352     }
353   }
354   PetscFunctionReturn(0);
355 }
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "MatGetValues_MPISBAIJ"
359 PetscErrorCode MatGetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
360 {
361   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
362   PetscErrorCode ierr;
363   PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
364   PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;
365 
366   PetscFunctionBegin;
367   for (i=0; i<m; i++) {
368     if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]); */
369     if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
370     if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
371       row = idxm[i] - bsrstart;
372       for (j=0; j<n; j++) {
373         if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column %D",idxn[j]); */
374         if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
375         if (idxn[j] >= bscstart && idxn[j] < bscend) {
376           col  = idxn[j] - bscstart;
377           ierr = MatGetValues_SeqSBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
378         } else {
379           if (!baij->colmap) {
380             ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
381           }
382 #if defined(PETSC_USE_CTABLE)
383           ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
384           data--;
385 #else
386           data = baij->colmap[idxn[j]/bs]-1;
387 #endif
388           if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
389           else {
390             col  = data + idxn[j]%bs;
391             ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
392           }
393         }
394       }
395     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
396   }
397   PetscFunctionReturn(0);
398 }
399 
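/*
   NORM_FROBENIUS: the local squares ||A||_F^2 and ||B||_F^2 are summed over all
   processes and combined as

     ||mat||_F = sqrt( sum ||A||_F^2 + 2 * sum ||B||_F^2 )

   where the factor 2 on the off-diagonal part B accounts for its unstored (transposed)
   lower-triangular counterpart.  NORM_1 and NORM_INFINITY (equal for a symmetric
   matrix) are computed as the maximum global column sum, with each stored entry outside
   the diagonal blocks counted in both its row and its column sums to account for the
   missing lower triangle.
*/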
400 #undef __FUNCT__
401 #define __FUNCT__ "MatNorm_MPISBAIJ"
402 PetscErrorCode MatNorm_MPISBAIJ(Mat mat,NormType type,PetscReal *norm)
403 {
404   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
405   PetscErrorCode ierr;
406   PetscReal      sum[2],*lnorm2;
407 
408   PetscFunctionBegin;
409   if (baij->size == 1) {
410     ierr =  MatNorm(baij->A,type,norm);CHKERRQ(ierr);
411   } else {
412     if (type == NORM_FROBENIUS) {
413       ierr    = PetscMalloc1(2,&lnorm2);CHKERRQ(ierr);
414       ierr    =  MatNorm(baij->A,type,lnorm2);CHKERRQ(ierr);
415       *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2++;            /* square of norm(A) */
416       ierr    =  MatNorm(baij->B,type,lnorm2);CHKERRQ(ierr);
417       *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2--;             /* square of norm(B) */
418       ierr    = MPI_Allreduce(lnorm2,sum,2,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
419       *norm   = PetscSqrtReal(sum[0] + 2*sum[1]);
420       ierr    = PetscFree(lnorm2);CHKERRQ(ierr);
421     } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
422       Mat_SeqSBAIJ *amat=(Mat_SeqSBAIJ*)baij->A->data;
423       Mat_SeqBAIJ  *bmat=(Mat_SeqBAIJ*)baij->B->data;
424       PetscReal    *rsum,*rsum2,vabs;
425       PetscInt     *jj,*garray=baij->garray,rstart=baij->rstartbs,nz;
426       PetscInt     brow,bcol,col,bs=baij->A->rmap->bs,row,grow,gcol,mbs=amat->mbs;
427       MatScalar    *v;
428 
429       ierr = PetscMalloc2(mat->cmap->N,&rsum,mat->cmap->N,&rsum2);CHKERRQ(ierr);
430       ierr = PetscMemzero(rsum,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
431       /* Amat */
432       v = amat->a; jj = amat->j;
433       for (brow=0; brow<mbs; brow++) {
434         grow = bs*(rstart + brow);
435         nz   = amat->i[brow+1] - amat->i[brow];
436         for (bcol=0; bcol<nz; bcol++) {
437           gcol = bs*(rstart + *jj); jj++;
438           for (col=0; col<bs; col++) {
439             for (row=0; row<bs; row++) {
440               vabs            = PetscAbsScalar(*v); v++;
441               rsum[gcol+col] += vabs;
442               /* non-diagonal block */
443               if (bcol > 0 && vabs > 0.0) rsum[grow+row] += vabs;
444             }
445           }
446         }
447       }
448       /* Bmat */
449       v = bmat->a; jj = bmat->j;
450       for (brow=0; brow<mbs; brow++) {
451         grow = bs*(rstart + brow);
452         nz = bmat->i[brow+1] - bmat->i[brow];
453         for (bcol=0; bcol<nz; bcol++) {
454           gcol = bs*garray[*jj]; jj++;
455           for (col=0; col<bs; col++) {
456             for (row=0; row<bs; row++) {
457               vabs            = PetscAbsScalar(*v); v++;
458               rsum[gcol+col] += vabs;
459               rsum[grow+row] += vabs;
460             }
461           }
462         }
463       }
464       ierr  = MPI_Allreduce(rsum,rsum2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
465       *norm = 0.0;
466       for (col=0; col<mat->cmap->N; col++) {
467         if (rsum2[col] > *norm) *norm = rsum2[col];
468       }
469       ierr = PetscFree2(rsum,rsum2);CHKERRQ(ierr);
470     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support for this norm yet");
471   }
472   PetscFunctionReturn(0);
473 }
474 
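/*
   MatAssemblyBegin checks that all processes use a consistent InsertMode and starts
   the communication of the point and block stashes that hold off-process entries;
   the matching receives and the actual insertion happen in MatAssemblyEnd() below.
*/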
475 #undef __FUNCT__
476 #define __FUNCT__ "MatAssemblyBegin_MPISBAIJ"
477 PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat,MatAssemblyType mode)
478 {
479   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
480   PetscErrorCode ierr;
481   PetscInt       nstash,reallocs;
482   InsertMode     addv;
483 
484   PetscFunctionBegin;
485   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);
486 
487   /* make sure all processors are either in INSERT_VALUES or ADD_VALUES mode */
488   ierr = MPI_Allreduce((PetscEnum*)&mat->insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
489   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
490   mat->insertmode = addv; /* in case this processor had no cache */
491 
492   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
493   ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
494   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
495   ierr = PetscInfo2(mat,"Stash has %D entries,uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
496   ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
497   ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
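/*
   MatAssemblyEnd receives the stashed off-process entries (point stash first, then the
   column-oriented block stash) and inserts them through MatSetValues_MPISBAIJ() and
   MatSetValuesBlocked_MPISBAIJ(), assembles the local A and B parts, disassembles if
   some other process had to, and on the first final assembly builds the scatter
   contexts (Mvctx, sMvctx) used by the matrix-vector products.
*/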
501 #undef __FUNCT__
502 #define __FUNCT__ "MatAssemblyEnd_MPISBAIJ"
503 PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat,MatAssemblyType mode)
504 {
505   Mat_MPISBAIJ   *baij=(Mat_MPISBAIJ*)mat->data;
506   Mat_SeqSBAIJ   *a   =(Mat_SeqSBAIJ*)baij->A->data;
507   PetscErrorCode ierr;
508   PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
509   PetscInt       *row,*col;
510   PetscBool      other_disassembled;
511   PetscMPIInt    n;
512   PetscBool      r1,r2,r3;
513   MatScalar      *val;
514   InsertMode     addv = mat->insertmode;
515 
516   /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
517   PetscFunctionBegin;
518   if (!baij->donotstash &&  !mat->nooffprocentries) {
519     while (1) {
520       ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
521       if (!flg) break;
522 
523       for (i=0; i<n;) {
524         /* Now identify the consecutive vals belonging to the same row */
525         for (j=i,rstart=row[j]; j<n; j++) {
526           if (row[j] != rstart) break;
527         }
528         if (j < n) ncols = j-i;
529         else       ncols = n-i;
530         /* Now assemble all these values with a single function call */
531         ierr = MatSetValues_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
532         i    = j;
533       }
534     }
535     ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
536     /* Now process the block-stash. Since the values are stashed column-oriented,
537        temporarily set the roworiented flags to PETSC_FALSE, and restore the
538        original flags after MatSetValuesBlocked() */
539     r1 = baij->roworiented;
540     r2 = a->roworiented;
541     r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;
542 
543     baij->roworiented = PETSC_FALSE;
544     a->roworiented    = PETSC_FALSE;
545 
546     ((Mat_SeqBAIJ*)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
547     while (1) {
548       ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
549       if (!flg) break;
550 
551       for (i=0; i<n;) {
552         /* Now identify the consecutive vals belonging to the same row */
553         for (j=i,rstart=row[j]; j<n; j++) {
554           if (row[j] != rstart) break;
555         }
556         if (j < n) ncols = j-i;
557         else       ncols = n-i;
558         ierr = MatSetValuesBlocked_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr);
559         i    = j;
560       }
561     }
562     ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);
563 
564     baij->roworiented = r1;
565     a->roworiented    = r2;
566 
567     ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
568   }
569 
570   ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
571   ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);
572 
573   /* determine if any processor has disassembled; if so, we must
574      also disassemble ourselves so that we can reassemble. */
575   /*
576      if nonzero structure of submatrix B cannot change then we know that
577      no processor disassembled thus we can skip this stuff
578   */
579   if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
580     ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
581     if (mat->was_assembled && !other_disassembled) {
582       ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
583     }
584   }
585 
586   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
587     ierr = MatSetUpMultiply_MPISBAIJ(mat);CHKERRQ(ierr); /* setup Mvctx and sMvctx */
588   }
589   ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
590   ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);
591 
592   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
593 
594   baij->rowvalues = 0;
595 
596   /* if no new nonzero locations are allowed in the matrix then only set the matrix state the first time through */
597   if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
598     PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
599     ierr = MPI_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
600   }
601   PetscFunctionReturn(0);
602 }
603 
604 extern PetscErrorCode MatView_SeqSBAIJ_ASCII(Mat,PetscViewer);
605 extern PetscErrorCode MatSetValues_MPIBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
606 #include <petscdraw.h>
607 #undef __FUNCT__
608 #define __FUNCT__ "MatView_MPISBAIJ_ASCIIorDraworSocket"
609 static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
610 {
611   Mat_MPISBAIJ      *baij = (Mat_MPISBAIJ*)mat->data;
612   PetscErrorCode    ierr;
613   PetscInt          bs   = mat->rmap->bs;
614   PetscMPIInt       rank = baij->rank;
615   PetscBool         iascii,isdraw;
616   PetscViewer       sviewer;
617   PetscViewerFormat format;
618 
619   PetscFunctionBegin;
620   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
621   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
622   if (iascii) {
623     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
624     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
625       MatInfo info;
626       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
627       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
628       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
629       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
630       ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
631       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
632       ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
633       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
634       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
635       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
636       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
637       ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
638       PetscFunctionReturn(0);
639     } else if (format == PETSC_VIEWER_ASCII_INFO) {
640       ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
641       PetscFunctionReturn(0);
642     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
643       PetscFunctionReturn(0);
644     }
645   }
646 
647   if (isdraw) {
648     PetscDraw draw;
649     PetscBool isnull;
650     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
651     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
652   }
653 
654   {
655     /* assemble the entire matrix onto first processor. */
656     Mat          A;
657     Mat_SeqSBAIJ *Aloc;
658     Mat_SeqBAIJ  *Bloc;
659     PetscInt     M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
660     MatScalar    *a;
661 
662     /* Should this be the same type as mat? */
663     ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
664     if (!rank) {
665       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
666     } else {
667       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
668     }
669     ierr = MatSetType(A,MATMPISBAIJ);CHKERRQ(ierr);
670     ierr = MatMPISBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
671     ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
672     ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);
673 
674     /* copy over the A part */
675     Aloc = (Mat_SeqSBAIJ*)baij->A->data;
676     ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
677     ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
678 
679     for (i=0; i<mbs; i++) {
680       rvals[0] = bs*(baij->rstartbs + i);
681       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
682       for (j=ai[i]; j<ai[i+1]; j++) {
683         col = (baij->cstartbs+aj[j])*bs;
684         for (k=0; k<bs; k++) {
685           ierr = MatSetValues_MPISBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
686           col++;
687           a += bs;
688         }
689       }
690     }
691     /* copy over the B part */
692     Bloc = (Mat_SeqBAIJ*)baij->B->data;
693     ai   = Bloc->i; aj = Bloc->j; a = Bloc->a;
694     for (i=0; i<mbs; i++) {
695 
696       rvals[0] = bs*(baij->rstartbs + i);
697       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
698       for (j=ai[i]; j<ai[i+1]; j++) {
699         col = baij->garray[aj[j]]*bs;
700         for (k=0; k<bs; k++) {
701           ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
702           col++;
703           a += bs;
704         }
705       }
706     }
707     ierr = PetscFree(rvals);CHKERRQ(ierr);
708     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
709     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
710     /*
711        Everyone has to participate in drawing the matrix since the graphics waits are
712        synchronized across all processors that share the PetscDraw object
713     */
714     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
715     if (!rank) {
716         ierr = MatView_SeqSBAIJ_ASCII(((Mat_MPISBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
717     }
718     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
719     ierr = MatDestroy(&A);CHKERRQ(ierr);
720   }
721   PetscFunctionReturn(0);
722 }
723 
724 #undef __FUNCT__
725 #define __FUNCT__ "MatView_MPISBAIJ"
726 PetscErrorCode MatView_MPISBAIJ(Mat mat,PetscViewer viewer)
727 {
728   PetscErrorCode ierr;
729   PetscBool      iascii,isdraw,issocket,isbinary;
730 
731   PetscFunctionBegin;
732   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
733   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
734   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
735   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
736   if (iascii || isdraw || issocket || isbinary) {
737     ierr = MatView_MPISBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
738   }
739   PetscFunctionReturn(0);
740 }
741 
742 #undef __FUNCT__
743 #define __FUNCT__ "MatDestroy_MPISBAIJ"
744 PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
745 {
746   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
747   PetscErrorCode ierr;
748 
749   PetscFunctionBegin;
750 #if defined(PETSC_USE_LOG)
751   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
752 #endif
753   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
754   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
755   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
756   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
757 #if defined(PETSC_USE_CTABLE)
758   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
759 #else
760   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
761 #endif
762   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
763   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
764   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
765   ierr = VecDestroy(&baij->slvec0);CHKERRQ(ierr);
766   ierr = VecDestroy(&baij->slvec0b);CHKERRQ(ierr);
767   ierr = VecDestroy(&baij->slvec1);CHKERRQ(ierr);
768   ierr = VecDestroy(&baij->slvec1a);CHKERRQ(ierr);
769   ierr = VecDestroy(&baij->slvec1b);CHKERRQ(ierr);
770   ierr = VecScatterDestroy(&baij->sMvctx);CHKERRQ(ierr);
771   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
772   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
773   ierr = PetscFree(baij->hd);CHKERRQ(ierr);
774   ierr = VecDestroy(&baij->diag);CHKERRQ(ierr);
775   ierr = VecDestroy(&baij->bb1);CHKERRQ(ierr);
776   ierr = VecDestroy(&baij->xx1);CHKERRQ(ierr);
777 #if defined(PETSC_USE_REAL_MAT_SINGLE)
778   ierr = PetscFree(baij->setvaluescopy);CHKERRQ(ierr);
779 #endif
780   ierr = PetscFree(baij->in_loc);CHKERRQ(ierr);
781   ierr = PetscFree(baij->v_loc);CHKERRQ(ierr);
782   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
783   ierr = PetscFree(mat->data);CHKERRQ(ierr);
784 
785   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
786   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
787   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
788   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
789   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPISBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
790   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpisbaij_mpisbstrm_C",NULL);CHKERRQ(ierr);
791   PetscFunctionReturn(0);
792 }
793 
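/*
   Matrix-vector product with the one-scatter (slvec) scheme used by the MatMult and
   MatMultAdd variants below.  Only the upper triangle is stored, so the full action is

     y = A x + B x_ghost + B^T x        (B^H x in the Hermitian variant)

   The local pieces A x and B^T x (or B^H x) are formed first and packed together with
   x into slvec0; a single VecScatter with ADD_VALUES then both delivers the transpose
   contributions to their owning processes (accumulated into slvec1a on top of A x) and
   gathers the ghost values of x into slvec1b; a final MatMultAdd with B completes y.
*/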
794 #undef __FUNCT__
795 #define __FUNCT__ "MatMult_MPISBAIJ_Hermitian"
796 PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A,Vec xx,Vec yy)
797 {
798   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
799   PetscErrorCode ierr;
800   PetscInt       nt,mbs=a->mbs,bs=A->rmap->bs;
801   PetscScalar    *x,*from;
802 
803   PetscFunctionBegin;
804   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
805   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
806 
807   /* diagonal part */
808   ierr = (*a->A->ops->mult)(a->A,xx,a->slvec1a);CHKERRQ(ierr);
809   ierr = VecSet(a->slvec1b,0.0);CHKERRQ(ierr);
810 
811   /* subdiagonal part */
812   ierr = (*a->B->ops->multhermitiantranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
813 
814   /* copy x into the vec slvec0 */
815   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
816   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
817 
818   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
819   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
820   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
821 
822   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
823   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
824   /* superdiagonal part */
825   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,yy);CHKERRQ(ierr);
826   PetscFunctionReturn(0);
827 }
828 
829 #undef __FUNCT__
830 #define __FUNCT__ "MatMult_MPISBAIJ"
831 PetscErrorCode MatMult_MPISBAIJ(Mat A,Vec xx,Vec yy)
832 {
833   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
834   PetscErrorCode ierr;
835   PetscInt       nt,mbs=a->mbs,bs=A->rmap->bs;
836   PetscScalar    *x,*from;
837 
838   PetscFunctionBegin;
839   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
840   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
841 
842   /* diagonal part */
843   ierr = (*a->A->ops->mult)(a->A,xx,a->slvec1a);CHKERRQ(ierr);
844   ierr = VecSet(a->slvec1b,0.0);CHKERRQ(ierr);
845 
846   /* subdiagonal part */
847   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
848 
849   /* copy x into the vec slvec0 */
850   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
851   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
852 
853   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
854   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
855   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
856 
857   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
858   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
859   /* superdiagonal part */
860   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,yy);CHKERRQ(ierr);
861   PetscFunctionReturn(0);
862 }
863 
864 #undef __FUNCT__
865 #define __FUNCT__ "MatMult_MPISBAIJ_2comm"
866 PetscErrorCode MatMult_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy)
867 {
868   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
869   PetscErrorCode ierr;
870   PetscInt       nt;
871 
872   PetscFunctionBegin;
873   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
874   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
875 
876   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
877   if (nt != A->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
878 
879   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
880   /* do diagonal part */
881   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
882   /* do superdiagonal part */
883   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
884   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
885   /* do subdiagonal part */
886   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
887   ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
888   ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
889   PetscFunctionReturn(0);
890 }
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "MatMultAdd_MPISBAIJ"
894 PetscErrorCode MatMultAdd_MPISBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
895 {
896   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
897   PetscErrorCode ierr;
898   PetscInt       mbs=a->mbs,bs=A->rmap->bs;
899   PetscScalar    *x,*from,zero=0.0;
900 
901   PetscFunctionBegin;
902   /*
903   PetscSynchronizedPrintf(PetscObjectComm((PetscObject)A)," MatMultAdd is called ...\n");
904   PetscSynchronizedFlush(PetscObjectComm((PetscObject)A),PETSC_STDOUT);
905   */
906   /* diagonal part */
907   ierr = (*a->A->ops->multadd)(a->A,xx,yy,a->slvec1a);CHKERRQ(ierr);
908   ierr = VecSet(a->slvec1b,zero);CHKERRQ(ierr);
909 
910   /* subdiagonal part */
911   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
912 
913   /* copy x into the vec slvec0 */
914   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
915   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
916   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
917   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
918 
919   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
920   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
921   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
922 
923   /* superdiagonal part */
924   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,zz);CHKERRQ(ierr);
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "MatMultAdd_MPISBAIJ_2comm"
930 PetscErrorCode MatMultAdd_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy,Vec zz)
931 {
932   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
933   PetscErrorCode ierr;
934 
935   PetscFunctionBegin;
936   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
937   /* do diagonal part */
938   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
939   /* do superdiagonal part */
940   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
941   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
942 
943   /* do subdiagonal part */
944   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
945   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
946   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
947   PetscFunctionReturn(0);
948 }
949 
950 /*
951   This only works correctly for square matrices where the subblock A->A is the
952    diagonal block
953 */
954 #undef __FUNCT__
955 #define __FUNCT__ "MatGetDiagonal_MPISBAIJ"
956 PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A,Vec v)
957 {
958   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
959   PetscErrorCode ierr;
960 
961   PetscFunctionBegin;
962   /* if (a->rmap->N != a->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
963   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
964   PetscFunctionReturn(0);
965 }
966 
967 #undef __FUNCT__
968 #define __FUNCT__ "MatScale_MPISBAIJ"
969 PetscErrorCode MatScale_MPISBAIJ(Mat A,PetscScalar aa)
970 {
971   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
972   PetscErrorCode ierr;
973 
974   PetscFunctionBegin;
975   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
976   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
977   PetscFunctionReturn(0);
978 }
979 
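/*
   Returns one locally owned global row by merging the corresponding rows of the
   diagonal block A (upper triangle only) and the off-diagonal block B, translating
   B's block columns back to global indices through garray, with the combined entries
   ordered by increasing global column.
*/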
980 #undef __FUNCT__
981 #define __FUNCT__ "MatGetRow_MPISBAIJ"
982 PetscErrorCode MatGetRow_MPISBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
983 {
984   Mat_MPISBAIJ   *mat = (Mat_MPISBAIJ*)matin->data;
985   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
986   PetscErrorCode ierr;
987   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
988   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
989   PetscInt       *cmap,*idx_p,cstart = mat->rstartbs;
990 
991   PetscFunctionBegin;
992   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
993   mat->getrowactive = PETSC_TRUE;
994 
995   if (!mat->rowvalues && (idx || v)) {
996     /*
997         allocate enough space to hold information from the longest row.
998     */
999     Mat_SeqSBAIJ *Aa = (Mat_SeqSBAIJ*)mat->A->data;
1000     Mat_SeqBAIJ  *Ba = (Mat_SeqBAIJ*)mat->B->data;
1001     PetscInt     max = 1,mbs = mat->mbs,tmp;
1002     for (i=0; i<mbs; i++) {
1003       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; /* row length */
1004       if (max < tmp) max = tmp;
1005     }
1006     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1007   }
1008 
1009   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1010   lrow = row - brstart;  /* local row index */
1011 
1012   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1013   if (!v)   {pvA = 0; pvB = 0;}
1014   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1015   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1016   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1017   nztot = nzA + nzB;
1018 
1019   cmap = mat->garray;
1020   if (v  || idx) {
1021     if (nztot) {
1022       /* Sort by increasing column numbers, assuming A and B already sorted */
1023       PetscInt imark = -1;
1024       if (v) {
1025         *v = v_p = mat->rowvalues;
1026         for (i=0; i<nzB; i++) {
1027           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1028           else break;
1029         }
1030         imark = i;
1031         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1032         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1033       }
1034       if (idx) {
1035         *idx = idx_p = mat->rowindices;
1036         if (imark > -1) {
1037           for (i=0; i<imark; i++) {
1038             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1039           }
1040         } else {
1041           for (i=0; i<nzB; i++) {
1042             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1043             else break;
1044           }
1045           imark = i;
1046         }
1047         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1048         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
1049       }
1050     } else {
1051       if (idx) *idx = 0;
1052       if (v)   *v   = 0;
1053     }
1054   }
1055   *nz  = nztot;
1056   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1057   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1058   PetscFunctionReturn(0);
1059 }
1060 
1061 #undef __FUNCT__
1062 #define __FUNCT__ "MatRestoreRow_MPISBAIJ"
1063 PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1064 {
1065   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
1066 
1067   PetscFunctionBegin;
1068   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
1069   baij->getrowactive = PETSC_FALSE;
1070   PetscFunctionReturn(0);
1071 }
1072 
1073 #undef __FUNCT__
1074 #define __FUNCT__ "MatGetRowUpperTriangular_MPISBAIJ"
1075 PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1076 {
1077   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ*)A->data;
1078   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data;
1079 
1080   PetscFunctionBegin;
1081   aA->getrow_utriangular = PETSC_TRUE;
1082   PetscFunctionReturn(0);
1083 }
1084 #undef __FUNCT__
1085 #define __FUNCT__ "MatRestoreRowUpperTriangular_MPISBAIJ"
1086 PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1087 {
1088   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ*)A->data;
1089   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data;
1090 
1091   PetscFunctionBegin;
1092   aA->getrow_utriangular = PETSC_FALSE;
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 #undef __FUNCT__
1097 #define __FUNCT__ "MatRealPart_MPISBAIJ"
1098 PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1099 {
1100   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1101   PetscErrorCode ierr;
1102 
1103   PetscFunctionBegin;
1104   ierr = MatRealPart(a->A);CHKERRQ(ierr);
1105   ierr = MatRealPart(a->B);CHKERRQ(ierr);
1106   PetscFunctionReturn(0);
1107 }
1108 
1109 #undef __FUNCT__
1110 #define __FUNCT__ "MatImaginaryPart_MPISBAIJ"
1111 PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1112 {
1113   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1114   PetscErrorCode ierr;
1115 
1116   PetscFunctionBegin;
1117   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
1118   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
1119   PetscFunctionReturn(0);
1120 }
1121 
1122 #undef __FUNCT__
1123 #define __FUNCT__ "MatZeroEntries_MPISBAIJ"
1124 PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1125 {
1126   Mat_MPISBAIJ   *l = (Mat_MPISBAIJ*)A->data;
1127   PetscErrorCode ierr;
1128 
1129   PetscFunctionBegin;
1130   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1131   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1132   PetscFunctionReturn(0);
1133 }
1134 
1135 #undef __FUNCT__
1136 #define __FUNCT__ "MatGetInfo_MPISBAIJ"
1137 PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1138 {
1139   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)matin->data;
1140   Mat            A  = a->A,B = a->B;
1141   PetscErrorCode ierr;
1142   PetscReal      isend[5],irecv[5];
1143 
1144   PetscFunctionBegin;
1145   info->block_size = (PetscReal)matin->rmap->bs;
1146 
1147   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1148 
1149   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1150   isend[3] = info->memory;  isend[4] = info->mallocs;
1151 
1152   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1153 
1154   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1155   isend[3] += info->memory;  isend[4] += info->mallocs;
1156   if (flag == MAT_LOCAL) {
1157     info->nz_used      = isend[0];
1158     info->nz_allocated = isend[1];
1159     info->nz_unneeded  = isend[2];
1160     info->memory       = isend[3];
1161     info->mallocs      = isend[4];
1162   } else if (flag == MAT_GLOBAL_MAX) {
1163     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1164 
1165     info->nz_used      = irecv[0];
1166     info->nz_allocated = irecv[1];
1167     info->nz_unneeded  = irecv[2];
1168     info->memory       = irecv[3];
1169     info->mallocs      = irecv[4];
1170   } else if (flag == MAT_GLOBAL_SUM) {
1171     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1172 
1173     info->nz_used      = irecv[0];
1174     info->nz_allocated = irecv[1];
1175     info->nz_unneeded  = irecv[2];
1176     info->memory       = irecv[3];
1177     info->mallocs      = irecv[4];
1178   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1179   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1180   info->fill_ratio_needed = 0;
1181   info->factor_mallocs    = 0;
1182   PetscFunctionReturn(0);
1183 }
1184 
1185 #undef __FUNCT__
1186 #define __FUNCT__ "MatSetOption_MPISBAIJ"
1187 PetscErrorCode MatSetOption_MPISBAIJ(Mat A,MatOption op,PetscBool flg)
1188 {
1189   Mat_MPISBAIJ   *a  = (Mat_MPISBAIJ*)A->data;
1190   Mat_SeqSBAIJ   *aA = (Mat_SeqSBAIJ*)a->A->data;
1191   PetscErrorCode ierr;
1192 
1193   PetscFunctionBegin;
1194   switch (op) {
1195   case MAT_NEW_NONZERO_LOCATIONS:
1196   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1197   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1198   case MAT_KEEP_NONZERO_PATTERN:
1199   case MAT_NEW_NONZERO_LOCATION_ERR:
1200     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1201     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1202     break;
1203   case MAT_ROW_ORIENTED:
1204     a->roworiented = flg;
1205 
1206     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1207     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1208     break;
1209   case MAT_NEW_DIAGONALS:
1210     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1211     break;
1212   case MAT_IGNORE_OFF_PROC_ENTRIES:
1213     a->donotstash = flg;
1214     break;
1215   case MAT_USE_HASH_TABLE:
1216     a->ht_flag = flg;
1217     break;
1218   case MAT_HERMITIAN:
1219     if (!A->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call MatAssemblyEnd() first");
1220     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1221 
1222     A->ops->mult = MatMult_MPISBAIJ_Hermitian;
1223     break;
1224   case MAT_SPD:
1225     A->spd_set = PETSC_TRUE;
1226     A->spd     = flg;
1227     if (flg) {
1228       A->symmetric                  = PETSC_TRUE;
1229       A->structurally_symmetric     = PETSC_TRUE;
1230       A->symmetric_set              = PETSC_TRUE;
1231       A->structurally_symmetric_set = PETSC_TRUE;
1232     }
1233     break;
1234   case MAT_SYMMETRIC:
1235     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1236     break;
1237   case MAT_STRUCTURALLY_SYMMETRIC:
1238     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1239     break;
1240   case MAT_SYMMETRY_ETERNAL:
1241     if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix must be symmetric");
1242     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1243     break;
1244   case MAT_IGNORE_LOWER_TRIANGULAR:
1245     aA->ignore_ltriangular = flg;
1246     break;
1247   case MAT_ERROR_LOWER_TRIANGULAR:
1248     aA->ignore_ltriangular = flg;
1249     break;
1250   case MAT_GETROW_UPPERTRIANGULAR:
1251     aA->getrow_utriangular = flg;
1252     break;
1253   default:
1254     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %d",op);
1255   }
1256   PetscFunctionReturn(0);
1257 }
1258 
1259 #undef __FUNCT__
1260 #define __FUNCT__ "MatTranspose_MPISBAIJ"
1261 PetscErrorCode MatTranspose_MPISBAIJ(Mat A,MatReuse reuse,Mat *B)
1262 {
1263   PetscErrorCode ierr;
1264 
1265   PetscFunctionBegin;
1266   if (reuse == MAT_INITIAL_MATRIX || *B != A) {
1267     ierr = MatDuplicate(A,MAT_COPY_VALUES,B);CHKERRQ(ierr);
1268   }
1269   PetscFunctionReturn(0);
1270 }
1271 
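/*
   Diagonal scaling must be symmetric (ll == rr) to preserve the SBAIJ structure: the
   diagonal block A is scaled on both sides by ll, while the off-diagonal block B is
   scaled on the left by ll and on the right by the scattered ghost entries of rr.
*/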
1272 #undef __FUNCT__
1273 #define __FUNCT__ "MatDiagonalScale_MPISBAIJ"
1274 PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat,Vec ll,Vec rr)
1275 {
1276   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
1277   Mat            a     = baij->A, b=baij->B;
1278   PetscErrorCode ierr;
1279   PetscInt       nv,m,n;
1280   PetscBool      flg;
1281 
1282   PetscFunctionBegin;
1283   if (ll != rr) {
1284     ierr = VecEqual(ll,rr,&flg);CHKERRQ(ierr);
1285     if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"For symmetric format, left and right scaling vectors must be the same");
1286   }
1287   if (!ll) PetscFunctionReturn(0);
1288 
1289   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
1290   if (m != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"For symmetric format, local size %d %d must be same",m,n);
1291 
1292   ierr = VecGetLocalSize(rr,&nv);CHKERRQ(ierr);
1293   if (nv!=n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Left and right vector non-conforming local size");
1294 
1295   ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1296 
1297   /* left diagonalscale the off-diagonal part */
1298   ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1299 
1300   /* scale the diagonal part */
1301   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1302 
1303   /* right diagonalscale the off-diagonal part */
1304   ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1305   ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1306   PetscFunctionReturn(0);
1307 }
1308 
1309 #undef __FUNCT__
1310 #define __FUNCT__ "MatSetUnfactored_MPISBAIJ"
1311 PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1312 {
1313   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1314   PetscErrorCode ierr;
1315 
1316   PetscFunctionBegin;
1317   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1318   PetscFunctionReturn(0);
1319 }
1320 
1321 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat,MatDuplicateOption,Mat*);
1322 
1323 #undef __FUNCT__
1324 #define __FUNCT__ "MatEqual_MPISBAIJ"
1325 PetscErrorCode MatEqual_MPISBAIJ(Mat A,Mat B,PetscBool  *flag)
1326 {
1327   Mat_MPISBAIJ   *matB = (Mat_MPISBAIJ*)B->data,*matA = (Mat_MPISBAIJ*)A->data;
1328   Mat            a,b,c,d;
1329   PetscBool      flg;
1330   PetscErrorCode ierr;
1331 
1332   PetscFunctionBegin;
1333   a = matA->A; b = matA->B;
1334   c = matB->A; d = matB->B;
1335 
1336   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1337   if (flg) {
1338     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1339   }
1340   ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1341   PetscFunctionReturn(0);
1342 }
1343 
1344 #undef __FUNCT__
1345 #define __FUNCT__ "MatCopy_MPISBAIJ"
1346 PetscErrorCode MatCopy_MPISBAIJ(Mat A,Mat B,MatStructure str)
1347 {
1348   PetscErrorCode ierr;
1349   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1350   Mat_MPISBAIJ   *b = (Mat_MPISBAIJ*)B->data;
1351 
1352   PetscFunctionBegin;
1353   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1354   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1355     ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1356     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1357     ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1358   } else {
1359     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1360     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1361   }
1362   PetscFunctionReturn(0);
1363 }
1364 
1365 #undef __FUNCT__
1366 #define __FUNCT__ "MatSetUp_MPISBAIJ"
1367 PetscErrorCode MatSetUp_MPISBAIJ(Mat A)
1368 {
1369   PetscErrorCode ierr;
1370 
1371   PetscFunctionBegin;
1372   ierr = MatMPISBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 #undef __FUNCT__
1377 #define __FUNCT__ "MatAXPY_MPISBAIJ"
1378 PetscErrorCode MatAXPY_MPISBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1379 {
1380   PetscErrorCode ierr;
1381   Mat_MPISBAIJ   *xx=(Mat_MPISBAIJ*)X->data,*yy=(Mat_MPISBAIJ*)Y->data;
1382   PetscBLASInt   bnz,one=1;
1383   Mat_SeqSBAIJ   *xa,*ya;
1384   Mat_SeqBAIJ    *xb,*yb;
1385 
1386   PetscFunctionBegin;
1387   if (str == SAME_NONZERO_PATTERN) {
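    /* identical nonzero patterns: the stored value arrays of the diagonal (A) and off-diagonal (B) parts
       line up entry by entry, so Y += a*X reduces to one BLAS axpy on each of the two value arrays */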
1388     PetscScalar alpha = a;
1389     xa   = (Mat_SeqSBAIJ*)xx->A->data;
1390     ya   = (Mat_SeqSBAIJ*)yy->A->data;
1391     ierr = PetscBLASIntCast(xa->nz,&bnz);CHKERRQ(ierr);
1392     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,xa->a,&one,ya->a,&one));
1393     xb   = (Mat_SeqBAIJ*)xx->B->data;
1394     yb   = (Mat_SeqBAIJ*)yy->B->data;
1395     ierr = PetscBLASIntCast(xb->nz,&bnz);CHKERRQ(ierr);
1396     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,xb->a,&one,yb->a,&one));
1397     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
1398   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X are a subset of Y's */
1399     ierr = MatSetOption(X,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
1400     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1401     ierr = MatSetOption(X,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
1402   } else {
1403     Mat      B;
1404     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
1405     if (bs != X->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrices must have same block size");
1406     ierr = MatGetRowUpperTriangular(X);CHKERRQ(ierr);
1407     ierr = MatGetRowUpperTriangular(Y);CHKERRQ(ierr);
1408     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
1409     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
1410     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
1411     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
1412     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
1413     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
1414     ierr = MatSetType(B,MATMPISBAIJ);CHKERRQ(ierr);
1415     ierr = MatAXPYGetPreallocation_SeqSBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
1416     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
1417     ierr = MatMPISBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
1418     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
1419     ierr = MatHeaderReplace(Y,B);CHKERRQ(ierr);
1420     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
1421     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
1422     ierr = MatRestoreRowUpperTriangular(X);CHKERRQ(ierr);
1423     ierr = MatRestoreRowUpperTriangular(Y);CHKERRQ(ierr);
1424   }
1425   PetscFunctionReturn(0);
1426 }
1427 
1428 #undef __FUNCT__
1429 #define __FUNCT__ "MatGetSubMatrices_MPISBAIJ"
1430 PetscErrorCode MatGetSubMatrices_MPISBAIJ(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
1431 {
1432   PetscErrorCode ierr;
1433   PetscInt       i;
1434   PetscBool      flg;
1435 
1436   PetscFunctionBegin;
1437   ierr = MatGetSubMatrices_MPIBAIJ(A,n,irow,icol,scall,B);CHKERRQ(ierr);
1438   for (i=0; i<n; i++) {
1439     ierr = ISEqual(irow[i],icol[i],&flg);CHKERRQ(ierr);
1440     if (!flg) { /* *B[i] is non-symmetric, set flag */
1441       ierr = MatSetOption(*B[i],MAT_SYMMETRIC,PETSC_FALSE);CHKERRQ(ierr);
1442     }
1443   }
1444   PetscFunctionReturn(0);
1445 }
1446 
1447 /* -------------------------------------------------------------------*/
1448 static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
1449                                        MatGetRow_MPISBAIJ,
1450                                        MatRestoreRow_MPISBAIJ,
1451                                        MatMult_MPISBAIJ,
1452                                /*  4*/ MatMultAdd_MPISBAIJ,
1453                                        MatMult_MPISBAIJ,       /* transpose versions are same as non-transpose */
1454                                        MatMultAdd_MPISBAIJ,
1455                                        0,
1456                                        0,
1457                                        0,
1458                                /* 10*/ 0,
1459                                        0,
1460                                        0,
1461                                        MatSOR_MPISBAIJ,
1462                                        MatTranspose_MPISBAIJ,
1463                                /* 15*/ MatGetInfo_MPISBAIJ,
1464                                        MatEqual_MPISBAIJ,
1465                                        MatGetDiagonal_MPISBAIJ,
1466                                        MatDiagonalScale_MPISBAIJ,
1467                                        MatNorm_MPISBAIJ,
1468                                /* 20*/ MatAssemblyBegin_MPISBAIJ,
1469                                        MatAssemblyEnd_MPISBAIJ,
1470                                        MatSetOption_MPISBAIJ,
1471                                        MatZeroEntries_MPISBAIJ,
1472                                /* 24*/ 0,
1473                                        0,
1474                                        0,
1475                                        0,
1476                                        0,
1477                                /* 29*/ MatSetUp_MPISBAIJ,
1478                                        0,
1479                                        0,
1480                                        0,
1481                                        0,
1482                                /* 34*/ MatDuplicate_MPISBAIJ,
1483                                        0,
1484                                        0,
1485                                        0,
1486                                        0,
1487                                /* 39*/ MatAXPY_MPISBAIJ,
1488                                        MatGetSubMatrices_MPISBAIJ,
1489                                        MatIncreaseOverlap_MPISBAIJ,
1490                                        MatGetValues_MPISBAIJ,
1491                                        MatCopy_MPISBAIJ,
1492                                /* 44*/ 0,
1493                                        MatScale_MPISBAIJ,
1494                                        0,
1495                                        0,
1496                                        0,
1497                                /* 49*/ 0,
1498                                        0,
1499                                        0,
1500                                        0,
1501                                        0,
1502                                /* 54*/ 0,
1503                                        0,
1504                                        MatSetUnfactored_MPISBAIJ,
1505                                        0,
1506                                        MatSetValuesBlocked_MPISBAIJ,
1507                                /* 59*/ 0,
1508                                        0,
1509                                        0,
1510                                        0,
1511                                        0,
1512                                /* 64*/ 0,
1513                                        0,
1514                                        0,
1515                                        0,
1516                                        0,
1517                                /* 69*/ MatGetRowMaxAbs_MPISBAIJ,
1518                                        0,
1519                                        0,
1520                                        0,
1521                                        0,
1522                                /* 74*/ 0,
1523                                        0,
1524                                        0,
1525                                        0,
1526                                        0,
1527                                /* 79*/ 0,
1528                                        0,
1529                                        0,
1530                                        0,
1531                                        MatLoad_MPISBAIJ,
1532                                /* 84*/ 0,
1533                                        0,
1534                                        0,
1535                                        0,
1536                                        0,
1537                                /* 89*/ 0,
1538                                        0,
1539                                        0,
1540                                        0,
1541                                        0,
1542                                /* 94*/ 0,
1543                                        0,
1544                                        0,
1545                                        0,
1546                                        0,
1547                                /* 99*/ 0,
1548                                        0,
1549                                        0,
1550                                        0,
1551                                        0,
1552                                /*104*/ 0,
1553                                        MatRealPart_MPISBAIJ,
1554                                        MatImaginaryPart_MPISBAIJ,
1555                                        MatGetRowUpperTriangular_MPISBAIJ,
1556                                        MatRestoreRowUpperTriangular_MPISBAIJ,
1557                                /*109*/ 0,
1558                                        0,
1559                                        0,
1560                                        0,
1561                                        0,
1562                                /*114*/ 0,
1563                                        0,
1564                                        0,
1565                                        0,
1566                                        0,
1567                                /*119*/ 0,
1568                                        0,
1569                                        0,
1570                                        0,
1571                                        0,
1572                                /*124*/ 0,
1573                                        0,
1574                                        0,
1575                                        0,
1576                                        0,
1577                                /*129*/ 0,
1578                                        0,
1579                                        0,
1580                                        0,
1581                                        0,
1582                                /*134*/ 0,
1583                                        0,
1584                                        0,
1585                                        0,
1586                                        0,
1587                                /*139*/ 0,
1588                                        0,
1589                                        0,
1590                                        0,
1591                                        0,
1592                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPISBAIJ
1593 };
1594 
1595 #undef __FUNCT__
1596 #define __FUNCT__ "MatGetDiagonalBlock_MPISBAIJ"
1597 PetscErrorCode  MatGetDiagonalBlock_MPISBAIJ(Mat A,Mat *a)
1598 {
1599   PetscFunctionBegin;
1600   *a = ((Mat_MPISBAIJ*)A->data)->A;
1601   PetscFunctionReturn(0);
1602 }
1603 
1604 #undef __FUNCT__
1605 #define __FUNCT__ "MatMPISBAIJSetPreallocation_MPISBAIJ"
1606 PetscErrorCode  MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
1607 {
1608   Mat_MPISBAIJ   *b;
1609   PetscErrorCode ierr;
1610   PetscInt       i,mbs,Mbs;
1611 
1612   PetscFunctionBegin;
1613   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
1614   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
1615   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
1616   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
1617 
1618   b   = (Mat_MPISBAIJ*)B->data;
1619   mbs = B->rmap->n/bs;
1620   Mbs = B->rmap->N/bs;
1621   if (mbs*bs != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"No of local rows %D must be divisible by blocksize %D",B->rmap->n,bs);
1622 
1623   B->rmap->bs = bs;
1624   b->bs2      = bs*bs;
1625   b->mbs      = mbs;
1626   b->Mbs      = Mbs;
1627   b->nbs      = B->cmap->n/bs;
1628   b->Nbs      = B->cmap->N/bs;
1629 
1630   for (i=0; i<=b->size; i++) {
1631     b->rangebs[i] = B->rmap->range[i]/bs;
1632   }
1633   b->rstartbs = B->rmap->rstart/bs;
1634   b->rendbs   = B->rmap->rend/bs;
1635 
1636   b->cstartbs = B->cmap->rstart/bs;
1637   b->cendbs   = B->cmap->rend/bs;
1638 
1639   if (!B->preallocated) {
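    /* the diagonal block is a sequential SBAIJ matrix (only the upper triangle is stored);
       the off-diagonal block is a sequential BAIJ matrix */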
1640     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
1641     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
1642     ierr = MatSetType(b->A,MATSEQSBAIJ);CHKERRQ(ierr);
1643     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
1644     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
1645     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
1646     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
1647     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
1648     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
1649   }
1650 
1651   ierr = MatSeqSBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
1652   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
1653 
1654   B->preallocated = PETSC_TRUE;
1655   PetscFunctionReturn(0);
1656 }
1657 
1658 #undef __FUNCT__
1659 #define __FUNCT__ "MatMPISBAIJSetPreallocationCSR_MPISBAIJ"
1660 PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
1661 {
1662   PetscInt       m,rstart,cstart,cend;
1663   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
1664   const PetscInt *JJ    =0;
1665   PetscScalar    *values=0;
1666   PetscErrorCode ierr;
1667 
1668   PetscFunctionBegin;
1669   if (bs < 1) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_OUTOFRANGE,"Invalid block size specified, must be positive but it is %D",bs);
1670   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
1671   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
1672   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
1673   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
1674   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
1675   m      = B->rmap->n/bs;
1676   rstart = B->rmap->rstart/bs;
1677   cstart = B->cmap->rstart/bs;
1678   cend   = B->cmap->rend/bs;
1679 
1680   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
1681   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
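  /* for each block row, count the block columns that fall inside the local diagonal column range [cstart,cend)
     (they go into d_nnz) and those that fall outside it (they go into o_nnz) */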
1682   for (i=0; i<m; i++) {
1683     nz = ii[i+1] - ii[i];
1684     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
1685     nz_max = PetscMax(nz_max,nz);
1686     JJ     = jj + ii[i];
1687     for (j=0; j<nz; j++) {
1688       if (*JJ >= cstart) break;
1689       JJ++;
1690     }
1691     d = 0;
1692     for (; j<nz; j++) {
1693       if (*JJ++ >= cend) break;
1694       d++;
1695     }
1696     d_nnz[i] = d;
1697     o_nnz[i] = nz - d;
1698   }
1699   ierr = MatMPISBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
1700   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
1701 
1702   values = (PetscScalar*)V;
1703   if (!values) {
1704     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
1705     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
1706   }
1707   for (i=0; i<m; i++) {
1708     PetscInt          row    = i + rstart;
1709     PetscInt          ncols  = ii[i+1] - ii[i];
1710     const PetscInt    *icols = jj + ii[i];
1711     const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
1712     ierr = MatSetValuesBlocked_MPISBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
1713   }
1714 
1715   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
1716   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1717   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1718   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
1719   PetscFunctionReturn(0);
1720 }
1721 
1722 /*MC
1723    MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
1724    based on block compressed sparse row format.  Only the upper triangular portion of the "diagonal" portion of
1725    the matrix is stored.
1726 
1727   For complex numbers this matrix is by default symmetric, NOT Hermitian. To make it Hermitian you
1728   can call MatSetOption(Mat,MAT_HERMITIAN,PETSC_TRUE);
1729 
1730    Options Database Keys:
1731 . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to MatSetFromOptions()
1732 
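  A minimal creation sketch (the sizes, block size, and preallocation counts below are illustrative only):
.vb
   Mat A;
   MatCreate(PETSC_COMM_WORLD,&A);
   MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,M);
   MatSetType(A,MATMPISBAIJ);
   MatMPISBAIJSetPreallocation(A,bs,d_nz,NULL,o_nz,NULL);
   ... insert entries in the upper triangle with MatSetValues()/MatSetValuesBlocked() ...
   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve
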
1733   Level: beginner
1734 
1735 .seealso: MatCreateMPISBAIJ
1736 M*/
1737 
1738 PETSC_EXTERN PetscErrorCode MatConvert_MPISBAIJ_MPISBSTRM(Mat,MatType,MatReuse,Mat*);
1739 
1740 #undef __FUNCT__
1741 #define __FUNCT__ "MatCreate_MPISBAIJ"
1742 PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
1743 {
1744   Mat_MPISBAIJ   *b;
1745   PetscErrorCode ierr;
1746   PetscBool      flg = PETSC_FALSE;
1747 
1748   PetscFunctionBegin;
1749   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
1750   B->data = (void*)b;
1751   ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1752 
1753   B->ops->destroy = MatDestroy_MPISBAIJ;
1754   B->ops->view    = MatView_MPISBAIJ;
1755   B->assembled    = PETSC_FALSE;
1756   B->insertmode   = NOT_SET_VALUES;
1757 
1758   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
1759   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
1760 
1761   /* build local table of row and column ownerships */
1762   ierr = PetscMalloc1(b->size+2,&b->rangebs);CHKERRQ(ierr);
1763 
1764   /* build cache for off array entries formed */
1765   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
1766 
1767   b->donotstash  = PETSC_FALSE;
1768   b->colmap      = NULL;
1769   b->garray      = NULL;
1770   b->roworiented = PETSC_TRUE;
1771 
1772   /* stuff used in block assembly */
1773   b->barray = 0;
1774 
1775   /* stuff used for matrix vector multiply */
1776   b->lvec    = 0;
1777   b->Mvctx   = 0;
1778   b->slvec0  = 0;
1779   b->slvec0b = 0;
1780   b->slvec1  = 0;
1781   b->slvec1a = 0;
1782   b->slvec1b = 0;
1783   b->sMvctx  = 0;
1784 
1785   /* stuff for MatGetRow() */
1786   b->rowindices   = 0;
1787   b->rowvalues    = 0;
1788   b->getrowactive = PETSC_FALSE;
1789 
1790   /* hash table stuff */
1791   b->ht           = 0;
1792   b->hd           = 0;
1793   b->ht_size      = 0;
1794   b->ht_flag      = PETSC_FALSE;
1795   b->ht_fact      = 0;
1796   b->ht_total_ct  = 0;
1797   b->ht_insert_ct = 0;
1798 
1799   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
1800   b->ijonly = PETSC_FALSE;
1801 
1802   b->in_loc = 0;
1803   b->v_loc  = 0;
1804   b->n_loc  = 0;
1805 
1806   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPISBAIJ);CHKERRQ(ierr);
1807   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPISBAIJ);CHKERRQ(ierr);
1808   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPISBAIJ);CHKERRQ(ierr);
1809   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C",MatMPISBAIJSetPreallocation_MPISBAIJ);CHKERRQ(ierr);
1810   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPISBAIJSetPreallocationCSR_C",MatMPISBAIJSetPreallocationCSR_MPISBAIJ);CHKERRQ(ierr);
1811   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpisbaij_mpisbstrm_C",MatConvert_MPISBAIJ_MPISBSTRM);CHKERRQ(ierr);
1812 
1813   B->symmetric                  = PETSC_TRUE;
1814   B->structurally_symmetric     = PETSC_TRUE;
1815   B->symmetric_set              = PETSC_TRUE;
1816   B->structurally_symmetric_set = PETSC_TRUE;
1817 
1818   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPISBAIJ);CHKERRQ(ierr);
1819   ierr      = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPISBAIJ matrix 1","Mat");CHKERRQ(ierr);
1820   ierr      = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",flg,&flg,NULL);CHKERRQ(ierr);
1821   if (flg) {
1822     PetscReal fact = 1.39;
1823     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
1824     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
1825     if (fact <= 1.0) fact = 1.39;
1826     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
1827     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
1828   }
1829   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1830   PetscFunctionReturn(0);
1831 }
1832 
1833 /*MC
1834    MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.
1835 
1836    This matrix type is identical to MATSEQSBAIJ when constructed with a single process communicator,
1837    and MATMPISBAIJ otherwise.
1838 
1839    Options Database Keys:
1840 . -mat_type sbaij - sets the matrix type to "sbaij" during a call to MatSetFromOptions()
1841 
1842   Level: beginner
1843 
1844 .seealso: MatCreateMPISBAIJ,MATSEQSBAIJ,MATMPISBAIJ
1845 M*/
1846 
1847 #undef __FUNCT__
1848 #define __FUNCT__ "MatMPISBAIJSetPreallocation"
1849 /*@C
1850    MatMPISBAIJSetPreallocation - For good matrix assembly performance
1851    the user should preallocate the matrix storage by setting the parameters
1852    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
1853    performance can be increased by more than a factor of 50.
1854 
1855    Collective on Mat
1856 
1857    Input Parameters:
1858 +  B - the matrix
1859 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
1860           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
1861 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
1862            submatrix  (same for all local rows)
1863 .  d_nnz - array containing the number of block nonzeros in the various block rows
1864            in the upper triangular and diagonal part of the in diagonal portion of the local
1865            in the upper triangular and diagonal part of the diagonal portion of the local submatrix
1866            (possibly different for each block row) or NULL.  If you plan to factor the matrix you must leave room
1867 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
1868            submatrix (same for all local rows).
1869 -  o_nnz - array containing the number of nonzeros in the various block rows of the
1870            off-diagonal portion of the local submatrix that is right of the diagonal
1871            (possibly different for each block row) or NULL.
1872 
1873 
1874    Options Database Keys:
1875 +   -mat_no_unroll - uses code that does not unroll the loops in the
1876                      block calculations (much slower)
1877 -   -mat_block_size - size of the blocks to use
1878 
1879    Notes:
1880 
1881    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
1882    then it must be used on all processors that share the object for that argument.
1883 
1884    If the *_nnz parameter is given then the *_nz parameter is ignored
1885 
1886    Storage Information:
1887    For a square global matrix we define each processor's diagonal portion
1888    to be its local rows and the corresponding columns (a square submatrix);
1889    each processor's off-diagonal portion encompasses the remainder of the
1890    local matrix (a rectangular submatrix).
1891 
1892    The user can specify preallocated storage for the diagonal part of
1893    the local submatrix with either d_nz or d_nnz (not both).  Set
1894    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
1895    memory allocation.  Likewise, specify preallocated storage for the
1896    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
1897 
1898    You can call MatGetInfo() to get information on how effective the preallocation was;
1899    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
1900    You can also run with the option -info and look for messages with the string
1901    malloc in them to see if additional memory allocation was needed.
1902 
1903    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
1904    the figure below we depict these three local rows and all columns (0-11).
1905 
1906 .vb
1907            0 1 2 3 4 5 6 7 8 9 10 11
1908           --------------------------
1909    row 3  |. . . d d d o o o o  o  o
1910    row 4  |. . . d d d o o o o  o  o
1911    row 5  |. . . d d d o o o o  o  o
1912           --------------------------
1913 .ve
1914 
1915    Thus, any entries in the d locations are stored in the d (diagonal)
1916    submatrix, and any entries in the o locations are stored in the
1917    o (off-diagonal) submatrix.  Note that the d matrix is stored in
1918    MATSEQSBAIJ format and the o submatrix in MATSEQBAIJ format.
1919 
1920    Now d_nz should indicate the number of block nonzeros per row in the upper triangular
1921    plus the diagonal part of the d matrix,
1922    and o_nz should indicate the number of block nonzeros per row in the o matrix
1923 
1924    In general, for PDE problems in which most nonzeros are near the diagonal,
1925    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
1926    or you will get TERRIBLE performance; see the users' manual chapter on
1927    matrices.
1928 
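   As a small worked sketch for the three local rows depicted above with bs=1, and assuming every d and o
   location shown is actually nonzero (these counts are illustrative only): only the upper triangular part,
   including the diagonal, of the d block is counted, so
.vb
   PetscInt d_nnz[] = {3,2,1};   /* row 3: cols 3,4,5;  row 4: cols 4,5;  row 5: col 5 */
   PetscInt o_nnz[] = {6,6,6};   /* cols 6-11 in every row                             */
   MatMPISBAIJSetPreallocation(B,1,0,d_nnz,0,o_nnz);
.ve
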
1929    Level: intermediate
1930 
1931 .keywords: matrix, block, aij, compressed row, sparse, parallel
1932 
1933 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateBAIJ(), PetscSplitOwnership()
1934 @*/
1935 PetscErrorCode  MatMPISBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
1936 {
1937   PetscErrorCode ierr;
1938 
1939   PetscFunctionBegin;
1940   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
1941   PetscValidType(B,1);
1942   PetscValidLogicalCollectiveInt(B,bs,2);
1943   ierr = PetscTryMethod(B,"MatMPISBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
1944   PetscFunctionReturn(0);
1945 }
1946 
1947 #undef __FUNCT__
1948 #define __FUNCT__ "MatCreateSBAIJ"
1949 /*@C
1950    MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format
1951    (block compressed row).  For good matrix assembly performance
1952    the user should preallocate the matrix storage by setting the parameters
1953    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
1954    performance can be increased by more than a factor of 50.
1955 
1956    Collective on MPI_Comm
1957 
1958    Input Parameters:
1959 +  comm - MPI communicator
1960 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
1961           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
1962 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
1963            This value should be the same as the local size used in creating the
1964            y vector for the matrix-vector product y = Ax.
1965 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
1966            This value should be the same as the local size used in creating the
1967            x vector for the matrix-vector product y = Ax.
1968 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
1969 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
1970 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
1971            submatrix  (same for all local rows)
1972 .  d_nnz - array containing the number of block nonzeros in the various block rows
1973            in the upper triangular and diagonal part of the diagonal portion of the local submatrix
1974            (possibly different for each block row) or NULL.
1975            If you plan to factor the matrix you must leave room for the diagonal entry and
1976            set its value even if it is zero.
1977 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
1978            submatrix (same for all local rows).
1979 -  o_nnz - array containing the number of nonzeros in the various block rows of the
1980            off-diagonal portion of the local submatrix (possibly different for
1981            each block row) or NULL.
1982 
1983    Output Parameter:
1984 .  A - the matrix
1985 
1986    Options Database Keys:
1987 +   -mat_no_unroll - uses code that does not unroll the loops in the
1988                      block calculations (much slower)
1989 .   -mat_block_size - size of the blocks to use
1990 -   -mat_mpi - use the parallel matrix data structures even on one processor
1991                (defaults to using SeqSBAIJ format on one processor)
1992 
1993    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
1994    MatXXXXSetPreallocation() paradigm instead of this routine directly.
1995    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
1996 
1997    Notes:
1998    The number of rows and columns must be divisible by blocksize.
1999    This matrix type does not support complex Hermitian operations.
2000 
2001    The user MUST specify either the local or global matrix dimensions
2002    (possibly both).
2003 
2004    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
2005    then it must be used on all processors that share the object for that argument.
2006 
2007    If the *_nnz parameter is given then the *_nz parameter is ignored
2008 
2009    Storage Information:
2010    For a square global matrix we define each processor's diagonal portion
2011    to be its local rows and the corresponding columns (a square submatrix);
2012    each processor's off-diagonal portion encompasses the remainder of the
2013    local matrix (a rectangular submatrix).
2014 
2015    The user can specify preallocated storage for the diagonal part of
2016    the local submatrix with either d_nz or d_nnz (not both).  Set
2017    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
2018    memory allocation.  Likewise, specify preallocated storage for the
2019    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
2020 
2021    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2022    the figure below we depict these three local rows and all columns (0-11).
2023 
2024 .vb
2025            0 1 2 3 4 5 6 7 8 9 10 11
2026           --------------------------
2027    row 3  |. . . d d d o o o o  o  o
2028    row 4  |. . . d d d o o o o  o  o
2029    row 5  |. . . d d d o o o o  o  o
2030           --------------------------
2031 .ve
2032 
2033    Thus, any entries in the d locations are stored in the d (diagonal)
2034    submatrix, and any entries in the o locations are stored in the
2035    o (off-diagonal) submatrix.  Note that the d matrix is stored in
2036    MATSEQSBAIJ format and the o submatrix in MATSEQBAIJ format.
2037 
2038    Now d_nz should indicate the number of block nonzeros per row in the upper triangular
2039    plus the diagonal part of the d matrix,
2040    and o_nz should indicate the number of block nonzeros per row in the o matrix.
2041    In general, for PDE problems in which most nonzeros are near the diagonal,
2042    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
2043    or you will get TERRIBLE performance; see the users' manual chapter on
2044    matrices.
2045 
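   A minimal call sketch (a square system of global size M with block size bs; the preallocation values
   d_nz and o_nz are illustrative only):
.vb
   Mat A;
   MatCreateSBAIJ(PETSC_COMM_WORLD,bs,PETSC_DECIDE,PETSC_DECIDE,M,M,d_nz,NULL,o_nz,NULL,&A);
.ve
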
2046    Level: intermediate
2047 
2048 .keywords: matrix, block, aij, compressed row, sparse, parallel
2049 
2050 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateBAIJ()
2051 @*/
2052 
2053 PetscErrorCode  MatCreateSBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
2054 {
2055   PetscErrorCode ierr;
2056   PetscMPIInt    size;
2057 
2058   PetscFunctionBegin;
2059   ierr = MatCreate(comm,A);CHKERRQ(ierr);
2060   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
2061   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2062   if (size > 1) {
2063     ierr = MatSetType(*A,MATMPISBAIJ);CHKERRQ(ierr);
2064     ierr = MatMPISBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
2065   } else {
2066     ierr = MatSetType(*A,MATSEQSBAIJ);CHKERRQ(ierr);
2067     ierr = MatSeqSBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
2068   }
2069   PetscFunctionReturn(0);
2070 }
2071 
2072 
2073 #undef __FUNCT__
2074 #define __FUNCT__ "MatDuplicate_MPISBAIJ"
2075 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
2076 {
2077   Mat            mat;
2078   Mat_MPISBAIJ   *a,*oldmat = (Mat_MPISBAIJ*)matin->data;
2079   PetscErrorCode ierr;
2080   PetscInt       len=0,nt,bs=matin->rmap->bs,mbs=oldmat->mbs;
2081   PetscScalar    *array;
2082 
2083   PetscFunctionBegin;
2084   *newmat = 0;
2085 
2086   ierr = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
2087   ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
2088   ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
2089   ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
2090   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
2091   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
2092 
2093   mat->factortype   = matin->factortype;
2094   mat->preallocated = PETSC_TRUE;
2095   mat->assembled    = PETSC_TRUE;
2096   mat->insertmode   = NOT_SET_VALUES;
2097 
2098   a      = (Mat_MPISBAIJ*)mat->data;
2099   a->bs2 = oldmat->bs2;
2100   a->mbs = oldmat->mbs;
2101   a->nbs = oldmat->nbs;
2102   a->Mbs = oldmat->Mbs;
2103   a->Nbs = oldmat->Nbs;
2104 
2105 
2106   a->size         = oldmat->size;
2107   a->rank         = oldmat->rank;
2108   a->donotstash   = oldmat->donotstash;
2109   a->roworiented  = oldmat->roworiented;
2110   a->rowindices   = 0;
2111   a->rowvalues    = 0;
2112   a->getrowactive = PETSC_FALSE;
2113   a->barray       = 0;
2114   a->rstartbs     = oldmat->rstartbs;
2115   a->rendbs       = oldmat->rendbs;
2116   a->cstartbs     = oldmat->cstartbs;
2117   a->cendbs       = oldmat->cendbs;
2118 
2119   /* hash table stuff */
2120   a->ht           = 0;
2121   a->hd           = 0;
2122   a->ht_size      = 0;
2123   a->ht_flag      = oldmat->ht_flag;
2124   a->ht_fact      = oldmat->ht_fact;
2125   a->ht_total_ct  = 0;
2126   a->ht_insert_ct = 0;
2127 
2128   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+2)*sizeof(PetscInt));CHKERRQ(ierr);
2129   if (oldmat->colmap) {
2130 #if defined(PETSC_USE_CTABLE)
2131     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
2132 #else
2133     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
2134     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
2135     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
2136 #endif
2137   } else a->colmap = 0;
2138 
2139   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
2140     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
2141     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
2142     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
2143   } else a->garray = 0;
2144 
2145   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
2146   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
2147   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
2148   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
2149   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
2150 
2151   ierr =  VecDuplicate(oldmat->slvec0,&a->slvec0);CHKERRQ(ierr);
2152   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0);CHKERRQ(ierr);
2153   ierr =  VecDuplicate(oldmat->slvec1,&a->slvec1);CHKERRQ(ierr);
2154   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1);CHKERRQ(ierr);
2155 
2156   ierr = VecGetLocalSize(a->slvec1,&nt);CHKERRQ(ierr);
2157   ierr = VecGetArray(a->slvec1,&array);CHKERRQ(ierr);
2158   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs*mbs,array,&a->slvec1a);CHKERRQ(ierr);
2159   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nt-bs*mbs,array+bs*mbs,&a->slvec1b);CHKERRQ(ierr);
2160   ierr = VecRestoreArray(a->slvec1,&array);CHKERRQ(ierr);
2161   ierr = VecGetArray(a->slvec0,&array);CHKERRQ(ierr);
2162   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nt-bs*mbs,array+bs*mbs,&a->slvec0b);CHKERRQ(ierr);
2163   ierr = VecRestoreArray(a->slvec0,&array);CHKERRQ(ierr);
2164   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0);CHKERRQ(ierr);
2165   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1);CHKERRQ(ierr);
2166   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0b);CHKERRQ(ierr);
2167   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1a);CHKERRQ(ierr);
2168   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1b);CHKERRQ(ierr);
2169 
2170   /* ierr =  VecScatterCopy(oldmat->sMvctx,&a->sMvctx); - not written yet, replaced by the lazy trick: */
2171   ierr      = PetscObjectReference((PetscObject)oldmat->sMvctx);CHKERRQ(ierr);
2172   a->sMvctx = oldmat->sMvctx;
2173   ierr      = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->sMvctx);CHKERRQ(ierr);
2174 
2175   ierr    =  MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
2176   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
2177   ierr    =  MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
2178   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
2179   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
2180   *newmat = mat;
2181   PetscFunctionReturn(0);
2182 }
2183 
2184 #undef __FUNCT__
2185 #define __FUNCT__ "MatLoad_MPISBAIJ"
2186 PetscErrorCode MatLoad_MPISBAIJ(Mat newmat,PetscViewer viewer)
2187 {
2188   PetscErrorCode ierr;
2189   PetscInt       i,nz,j,rstart,rend;
2190   PetscScalar    *vals,*buf;
2191   MPI_Comm       comm;
2192   MPI_Status     status;
2193   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,*sndcounts = 0,*browners,maxnz,*rowners,mmbs;
2194   PetscInt       header[4],*rowlengths = 0,M,N,m,*cols,*locrowlens;
2195   PetscInt       *procsnz = 0,jj,*mycols,*ibuf;
2196   PetscInt       bs = newmat->rmap->bs,Mbs,mbs,extra_rows;
2197   PetscInt       *dlens,*odlens,*mask,*masked1,*masked2,rowcount,odcount;
2198   PetscInt       dcount,kmax,k,nzcount,tmp,sizesset=1,grows,gcols;
2199   int            fd;
2200 
2201   PetscFunctionBegin;
2202   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
2203   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPISBAIJ matrix 2","Mat");CHKERRQ(ierr);
2204   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
2205   ierr = PetscOptionsEnd();CHKERRQ(ierr);
2206   if (bs < 0) bs = 1;
2207 
2208   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2209   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2210   if (!rank) {
2211     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
2212     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
2213     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
2214     if (header[3] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as MPISBAIJ");
2215   }
2216 
2217   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) sizesset = 0;
2218 
2219   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
2220   M    = header[1];
2221   N    = header[2];
2222 
2223   /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
2224   if (sizesset && newmat->rmap->N < 0) newmat->rmap->N = M;
2225   if (sizesset && newmat->cmap->N < 0) newmat->cmap->N = N;
2226 
2227   /* If global sizes are set, check if they are consistent with that given in the file */
2228   if (sizesset) {
2229     ierr = MatGetSize(newmat,&grows,&gcols);CHKERRQ(ierr);
2230   }
2231   if (sizesset && newmat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows: Matrix in file has (%D) and input matrix has (%D)",M,grows);
2232   if (sizesset && newmat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols: Matrix in file has (%D) and input matrix has (%D)",N,gcols);
2233 
2234   if (M != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only do square matrices");
2235 
2236   /*
2237      This code adds extra rows to make sure the number of rows is
2238      divisible by the blocksize
2239   */
2240   Mbs        = M/bs;
2241   extra_rows = bs - M + bs*(Mbs);
2242   if (extra_rows == bs) extra_rows = 0;
2243   else                  Mbs++;
2244   if (extra_rows && !rank) {
2245     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
2246   }
2247 
2248   /* determine ownership of all rows */
2249   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
2250     mbs = Mbs/size + ((Mbs % size) > rank);
2251     m   = mbs*bs;
2252   } else { /* User Set */
2253     m   = newmat->rmap->n;
2254     mbs = m/bs;
2255   }
2256   ierr       = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
2257   ierr       = PetscMPIIntCast(mbs,&mmbs);CHKERRQ(ierr);
2258   ierr       = MPI_Allgather(&mmbs,1,MPI_INT,rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
2259   rowners[0] = 0;
2260   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
2261   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
2262   rstart = rowners[rank];
2263   rend   = rowners[rank+1];
2264 
2265   /* distribute row lengths to all processors */
2266   ierr = PetscMalloc1((rend-rstart)*bs,&locrowlens);CHKERRQ(ierr);
2267   if (!rank) {
2268     ierr = PetscMalloc1(M+extra_rows,&rowlengths);CHKERRQ(ierr);
2269     ierr = PetscBinaryRead(fd,rowlengths,M,PETSC_INT);CHKERRQ(ierr);
2270     for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;
2271     ierr = PetscMalloc1(size,&sndcounts);CHKERRQ(ierr);
2272     for (i=0; i<size; i++) sndcounts[i] = browners[i+1] - browners[i];
2273     ierr = MPI_Scatterv(rowlengths,sndcounts,browners,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr);
2274     ierr = PetscFree(sndcounts);CHKERRQ(ierr);
2275   } else {
2276     ierr = MPI_Scatterv(0,0,0,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr);
2277   }
2278 
2279   if (!rank) {   /* procs[0] */
2280     /* calculate the number of nonzeros on each processor */
2281     ierr = PetscMalloc1(size,&procsnz);CHKERRQ(ierr);
2282     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
2283     for (i=0; i<size; i++) {
2284       for (j=rowners[i]*bs; j< rowners[i+1]*bs; j++) {
2285         procsnz[i] += rowlengths[j];
2286       }
2287     }
2288     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
2289 
2290     /* determine max buffer needed and allocate it */
2291     maxnz = 0;
2292     for (i=0; i<size; i++) {
2293       maxnz = PetscMax(maxnz,procsnz[i]);
2294     }
2295     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
2296 
2297     /* read in my part of the matrix column indices  */
2298     nz     = procsnz[0];
2299     ierr   = PetscMalloc1(nz,&ibuf);CHKERRQ(ierr);
2300     mycols = ibuf;
2301     if (size == 1) nz -= extra_rows;
2302     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
2303     if (size == 1) {
2304       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
2305     }
2306 
2307     /* read in each other proc's part (except the last one's) and ship it off */
2308     for (i=1; i<size-1; i++) {
2309       nz   = procsnz[i];
2310       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
2311       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2312     }
2313     /* read in the stuff for the last proc */
2314     if (size != 1) {
2315       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
2316       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
2317       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
2318       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
2319     }
2320     ierr = PetscFree(cols);CHKERRQ(ierr);
2321   } else {  /* procs[i], i>0 */
2322     /* determine buffer space needed for message */
2323     nz = 0;
2324     for (i=0; i<m; i++) nz += locrowlens[i];
2325     ierr   = PetscMalloc1(nz,&ibuf);CHKERRQ(ierr);
2326     mycols = ibuf;
2327     /* receive message of column indices*/
2328     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
2329     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
2330     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
2331   }
2332 
2333   /* loop over local rows, determining number of off diagonal entries */
2334   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
2335   ierr     = PetscMalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
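  /* mask[] flags the block columns already seen in the current block row; masked1[]/masked2[] record the
     distinct diagonal/off-diagonal block columns so the flags can be cleared again without sweeping all of mask[] */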
2336   ierr     = PetscMemzero(mask,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
2337   ierr     = PetscMemzero(masked1,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
2338   ierr     = PetscMemzero(masked2,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
2339   rowcount = 0;
2340   nzcount  = 0;
2341   for (i=0; i<mbs; i++) {
2342     dcount  = 0;
2343     odcount = 0;
2344     for (j=0; j<bs; j++) {
2345       kmax = locrowlens[rowcount];
2346       for (k=0; k<kmax; k++) {
2347         tmp = mycols[nzcount++]/bs; /* block col. index */
2348         if (!mask[tmp]) {
2349           mask[tmp] = 1;
2350           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp; /* entry in off-diag portion */
2351           else masked1[dcount++] = tmp; /* entry in diag portion */
2352         }
2353       }
2354       rowcount++;
2355     }
2356 
2357     dlens[i]  = dcount;  /* d_nzz[i] */
2358     odlens[i] = odcount; /* o_nzz[i] */
2359 
2360     /* zero out the mask elements we set */
2361     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
2362     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
2363   }
2364   if (!sizesset) {
2365     ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
2366   }
2367   ierr = MatMPISBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
2368   ierr = MatSetOption(newmat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
2369 
2370   if (!rank) {
2371     ierr = PetscMalloc1(maxnz,&buf);CHKERRQ(ierr);
2372     /* read in my part of the matrix numerical values  */
2373     nz     = procsnz[0];
2374     vals   = buf;
2375     mycols = ibuf;
2376     if (size == 1) nz -= extra_rows;
2377     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2378     if (size == 1) {
2379       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
2380     }
2381 
2382     /* insert into matrix */
2383     jj = rstart*bs;
2384     for (i=0; i<m; i++) {
2385       ierr    = MatSetValues(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
2386       mycols += locrowlens[i];
2387       vals   += locrowlens[i];
2388       jj++;
2389     }
2390 
2391     /* read in other processors (except the last one) and ship out */
2392     for (i=1; i<size-1; i++) {
2393       nz   = procsnz[i];
2394       vals = buf;
2395       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2396       ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
2397     }
2398     /* the last proc */
2399     if (size != 1) {
2400       nz   = procsnz[i] - extra_rows;
2401       vals = buf;
2402       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2403       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
2404       ierr = MPI_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
2405     }
2406     ierr = PetscFree(procsnz);CHKERRQ(ierr);
2407 
2408   } else {
2409     /* receive numeric values */
2410     ierr = PetscMalloc1(nz,&buf);CHKERRQ(ierr);
2411 
2412     /* receive message of values*/
2413     vals   = buf;
2414     mycols = ibuf;
2415     ierr   = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm,&status);CHKERRQ(ierr);
2416     ierr   = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr);
2417     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
2418 
2419     /* insert into matrix */
2420     jj = rstart*bs;
2421     for (i=0; i<m; i++) {
2422       ierr    = MatSetValues_MPISBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
2423       mycols += locrowlens[i];
2424       vals   += locrowlens[i];
2425       jj++;
2426     }
2427   }
2428 
2429   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
2430   ierr = PetscFree(buf);CHKERRQ(ierr);
2431   ierr = PetscFree(ibuf);CHKERRQ(ierr);
2432   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
2433   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
2434   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
2435   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2436   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2437   PetscFunctionReturn(0);
2438 }
2439 
2440 #undef __FUNCT__
2441 #define __FUNCT__ "MatMPISBAIJSetHashTableFactor"
2442 /*XXXXX@
2443    MatMPISBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
2444 
2445    Input Parameters:
2446 .  mat  - the matrix
2447 .  fact - factor
2448 
2449    Not Collective on Mat, each process can have a different hash factor
2450 
2451    Level: advanced
2452 
2453   Notes:
2454    This can also be set by the command line option: -mat_use_hash_table fact
2455 
2456 .keywords: matrix, hashtable, factor, HT
2457 
2458 .seealso: MatSetOption()
2459 @XXXXX*/
2460 
2461 
2462 #undef __FUNCT__
2463 #define __FUNCT__ "MatGetRowMaxAbs_MPISBAIJ"
2464 PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A,Vec v,PetscInt idx[])
2465 {
2466   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
2467   Mat_SeqBAIJ    *b = (Mat_SeqBAIJ*)(a->B)->data;
2468   PetscReal      atmp;
2469   PetscReal      *work,*svalues,*rvalues;
2470   PetscErrorCode ierr;
2471   PetscInt       i,bs,mbs,*bi,*bj,brow,j,ncols,krow,kcol,col,row,Mbs,bcol;
2472   PetscMPIInt    rank,size;
2473   PetscInt       *rowners_bs,dest,count,source;
2474   PetscScalar    *va;
2475   MatScalar      *ba;
2476   MPI_Status     stat;
2477 
2478   PetscFunctionBegin;
2479   if (idx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Send email to petsc-maint@mcs.anl.gov");
2480   ierr = MatGetRowMaxAbs(a->A,v,NULL);CHKERRQ(ierr);
2481   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2482 
2483   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2484   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2485 
2486   bs  = A->rmap->bs;
2487   mbs = a->mbs;
2488   Mbs = a->Mbs;
2489   ba  = b->a;
2490   bi  = b->i;
2491   bj  = b->j;
2492 
2493   /* find ownerships */
2494   rowners_bs = A->rmap->range;
2495 
2496   /* each proc creates an array to be distributed */
2497   ierr = PetscMalloc1(bs*Mbs,&work);CHKERRQ(ierr);
2498   ierr = PetscMemzero(work,bs*Mbs*sizeof(PetscReal));CHKERRQ(ierr);
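  /* only the upper off-diagonal blocks are stored locally, so each entry of B also stands for its transpose:
     it updates the local row maximum in va[] and the maximum of the row owned by the column's owner, which is
     accumulated in work[] and shipped to the (higher-rank) owning processes below */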
2499 
2500   /* row_max for B */
2501   if (rank != size-1) {
2502     for (i=0; i<mbs; i++) {
2503       ncols = bi[1] - bi[0]; bi++;
2504       brow  = bs*i;
2505       for (j=0; j<ncols; j++) {
2506         bcol = bs*(*bj);
2507         for (kcol=0; kcol<bs; kcol++) {
2508           col  = bcol + kcol;                /* local col index */
2509           col += rowners_bs[rank+1];      /* global col index */
2510           for (krow=0; krow<bs; krow++) {
2511             atmp = PetscAbsScalar(*ba); ba++;
2512             row  = brow + krow;   /* local row index */
2513             if (PetscRealPart(va[row]) < atmp) va[row] = atmp;
2514             if (work[col] < atmp) work[col] = atmp;
2515           }
2516         }
2517         bj++;
2518       }
2519     }
2520 
2521     /* send values to its owners */
2522     for (dest=rank+1; dest<size; dest++) {
2523       svalues = work + rowners_bs[dest];
2524       count   = rowners_bs[dest+1]-rowners_bs[dest];
2525       ierr    = MPI_Send(svalues,count,MPIU_REAL,dest,rank,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2526     }
2527   }
2528 
2529   /* receive values */
2530   if (rank) {
2531     rvalues = work;
2532     count   = rowners_bs[rank+1]-rowners_bs[rank];
2533     for (source=0; source<rank; source++) {
2534       ierr = MPI_Recv(rvalues,count,MPIU_REAL,MPI_ANY_SOURCE,MPI_ANY_TAG,PetscObjectComm((PetscObject)A),&stat);CHKERRQ(ierr);
2535       /* process values */
2536       for (i=0; i<count; i++) {
2537         if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
2538       }
2539     }
2540   }
2541 
2542   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2543   ierr = PetscFree(work);CHKERRQ(ierr);
2544   PetscFunctionReturn(0);
2545 }
2546 
2547 #undef __FUNCT__
2548 #define __FUNCT__ "MatSOR_MPISBAIJ"
2549 PetscErrorCode MatSOR_MPISBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2550 {
2551   Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)matin->data;
2552   PetscErrorCode    ierr;
2553   PetscInt          mbs=mat->mbs,bs=matin->rmap->bs;
2554   PetscScalar       *x,*ptr,*from;
2555   Vec               bb1;
2556   const PetscScalar *b;
2557 
2558   PetscFunctionBegin;
2559   if (its <= 0 || lits <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
2560   if (bs > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented");
2561 
2562   if (flag == SOR_APPLY_UPPER) {
2563     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2564     PetscFunctionReturn(0);
2565   }
2566 
2567   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2568     if (flag & SOR_ZERO_INITIAL_GUESS) {
2569       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
2570       its--;
2571     }
2572 
2573     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
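    /*
       Each pass assembles bb1 = bb minus the contribution of the off-diagonal blocks
       B and B^T applied to the current xx, using the combined slvec0/slvec1 scatter
       that MPISBAIJ also uses for MatMult, and then runs a symmetric sweep with the
       local diagonal block A only.
    */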
2574     while (its--) {
2575 
2576       /* lower triangular part: slvec0b = B^T*xx (negated below together with slvec0a) */
2577       ierr = (*mat->B->ops->multtranspose)(mat->B,xx,mat->slvec0b);CHKERRQ(ierr);
2578 
2579       /* copy xx into slvec0a */
2580       ierr = VecGetArray(mat->slvec0,&ptr);CHKERRQ(ierr);
2581       ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
2582       ierr = PetscMemcpy(ptr,x,bs*mbs*sizeof(PetscScalar));CHKERRQ(ierr);
2583       ierr = VecRestoreArray(mat->slvec0,&ptr);CHKERRQ(ierr);
2584 
2585       ierr = VecScale(mat->slvec0,-1.0);CHKERRQ(ierr);
2586 
2587       /* copy bb into slvec1a */
2588       ierr = VecGetArray(mat->slvec1,&ptr);CHKERRQ(ierr);
2589       ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
2590       ierr = PetscMemcpy(ptr,b,bs*mbs*sizeof(PetscScalar));CHKERRQ(ierr);
2591       ierr = VecRestoreArray(mat->slvec1,&ptr);CHKERRQ(ierr);
2592 
2593       /* set slvec1b = 0 */
2594       ierr = VecSet(mat->slvec1b,0.0);CHKERRQ(ierr);
2595 
2596       ierr = VecScatterBegin(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2597       ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
2598       ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
2599       ierr = VecScatterEnd(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2600 
2601       /* upper triangular part: bb1 = slvec1a + B*slvec1b, i.e. bb minus the off-diagonal contributions */
2602       ierr = (*mat->B->ops->multadd)(mat->B,mat->slvec1b,mat->slvec1a,bb1);CHKERRQ(ierr);
2603 
2604       /* local diagonal sweep */
2605       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
2606     }
2607     ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2608   } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2609     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2610   } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2611     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2612   } else if (flag & SOR_EISENSTAT) {
2613     Vec               xx1;
2614     PetscBool         hasop;
2615     const PetscScalar *diag;
2616     PetscScalar       *sl,scale = (omega - 2.0)/omega;
2617     PetscInt          i,n;
2618 
2619     if (!mat->xx1) {
2620       ierr = VecDuplicate(bb,&mat->xx1);CHKERRQ(ierr);
2621       ierr = VecDuplicate(bb,&mat->bb1);CHKERRQ(ierr);
2622     }
2623     xx1 = mat->xx1;
2624     bb1 = mat->bb1;
2625 
2626     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr);
2627 
2628     if (!mat->diag) {
2629       /* note: the diagonal is computed once and cached, so this is wrong if the same matrix is later given new values in its nonzeros */
2630       ierr = MatCreateVecs(matin,&mat->diag,NULL);CHKERRQ(ierr);
2631       ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr);
2632     }
2633     ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
2634 
2635     if (hasop) {
2636       ierr = MatMultDiagonalBlock(matin,xx,mat->slvec1a);CHKERRQ(ierr); /* slvec1a = (diagonal block)*xx, so the VecAYPX() below matches the manual loops in the else branch */
2637       ierr = VecAYPX(mat->slvec1a,scale,bb);CHKERRQ(ierr);
2638     } else {
2639       /*
2640           The following two calls are replaced by the hand-written loops below, which may be a bit faster with a good compiler:
2641       ierr = VecPointwiseMult(mat->slvec1a,mat->diag,xx);CHKERRQ(ierr);
2642       ierr = VecAYPX(mat->slvec1a,scale,bb);CHKERRQ(ierr);
2643       */
2644       ierr = VecGetArray(mat->slvec1a,&sl);CHKERRQ(ierr);
2645       ierr = VecGetArrayRead(mat->diag,&diag);CHKERRQ(ierr);
2646       ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
2647       ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
2648       ierr = VecGetLocalSize(xx,&n);CHKERRQ(ierr);
2649       if (omega == 1.0) {
2650         for (i=0; i<n; i++) sl[i] = b[i] - diag[i]*x[i];
2651         ierr = PetscLogFlops(2.0*n);CHKERRQ(ierr);
2652       } else {
2653         for (i=0; i<n; i++) sl[i] = b[i] + scale*diag[i]*x[i];
2654         ierr = PetscLogFlops(3.0*n);CHKERRQ(ierr);
2655       }
2656       ierr = VecRestoreArray(mat->slvec1a,&sl);CHKERRQ(ierr);
2657       ierr = VecRestoreArrayRead(mat->diag,&diag);CHKERRQ(ierr);
2658       ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
2659       ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
2660     }
2661 
2662     /* multiply off-diagonal portion of matrix */
2663     ierr = VecSet(mat->slvec1b,0.0);CHKERRQ(ierr);
2664     ierr = (*mat->B->ops->multtranspose)(mat->B,xx,mat->slvec0b);CHKERRQ(ierr);
2665     ierr = VecGetArray(mat->slvec0,&from);CHKERRQ(ierr);
2666     ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
2667     ierr = PetscMemcpy(from,x,bs*mbs*sizeof(PetscScalar));CHKERRQ(ierr);
2668     ierr = VecRestoreArray(mat->slvec0,&from);CHKERRQ(ierr);
2669     ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
2670     ierr = VecScatterBegin(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2671     ierr = VecScatterEnd(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2672     ierr = (*mat->B->ops->multadd)(mat->B,mat->slvec1b,mat->slvec1a,mat->slvec1a);CHKERRQ(ierr);
2673 
2674     /* local sweep */
2675     ierr = (*mat->A->ops->sor)(mat->A,mat->slvec1a,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr);
2676     ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr);
2677   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format");
2678   PetscFunctionReturn(0);
2679 }
2680 
2681 #undef __FUNCT__
2682 #define __FUNCT__ "MatSOR_MPISBAIJ_2comm"
2683 PetscErrorCode MatSOR_MPISBAIJ_2comm(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2684 {
2685   Mat_MPISBAIJ   *mat = (Mat_MPISBAIJ*)matin->data;
2686   PetscErrorCode ierr;
2687   Vec            lvec1,bb1;
2688 
2689   PetscFunctionBegin;
2690   if (its <= 0 || lits <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
2691   if (matin->rmap->bs > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented");
2692 
2693   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2694     if (flag & SOR_ZERO_INITIAL_GUESS) {
2695       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
2696       its--;
2697     }
2698 
2699     ierr = VecDuplicate(mat->lvec,&lvec1);CHKERRQ(ierr);
2700     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2701     while (its--) {
2702       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2703 
2704       /* lower triangular part: bb1 = bb - B^T*xx */
2705       ierr = (*mat->B->ops->multtranspose)(mat->B,xx,lvec1);CHKERRQ(ierr);
2706       ierr = VecScale(lvec1,-1.0);CHKERRQ(ierr);
2707 
2708       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2709       ierr = VecCopy(bb,bb1);CHKERRQ(ierr);
2710       ierr = VecScatterBegin(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2711 
2712       /* upper triangular part: bb1 = bb1 - B*x */
2713       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2714       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr);
2715 
2716       ierr = VecScatterEnd(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2717 
2718       /* diagonal sweep */
2719       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
2720     }
2721     ierr = VecDestroy(&lvec1);CHKERRQ(ierr);
2722     ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2723   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format");
2724   PetscFunctionReturn(0);
2725 }
2726 
2727 #undef __FUNCT__
2728 #define __FUNCT__ "MatCreateMPISBAIJWithArrays"
2729 /*@
2730      MatCreateMPISBAIJWithArrays - creates an MPI SBAIJ matrix using arrays that contain the local rows
2731          in standard CSR format.
2732 
2733    Collective on MPI_Comm
2734 
2735    Input Parameters:
2736 +  comm - MPI communicator
2737 .  bs - the block size, only a block size of 1 is supported
2738 .  m - number of local rows (Cannot be PETSC_DECIDE)
2739 .  n - This value should be the same as the local size used in creating the
2740        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
2741        calculated if N is given). For square matrices n is almost always m.
2742 .  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
2743 .  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
2744 .   i - row indices
2745 .   j - column indices
2746 -   a - matrix values
2747 
2748    Output Parameter:
2749 .   mat - the matrix
2750 
2751    Level: intermediate
2752 
2753    Notes:
2754        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
2755      thus you CANNOT change the matrix entries by changing the values of a[] after you have
2756      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
2757 
2758        The i and j indices are 0-based; the entries of i are offsets into the local j (and a) arrays.
2759 
2760 .keywords: matrix, sbaij, symmetric, compressed row, sparse, parallel
2761 
2762 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
2763           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
2764 @*/
2765 PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
2766 {
2767   PetscErrorCode ierr;
2768 
2770   PetscFunctionBegin;
2771   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
2772   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
2773   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
2774   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
2775   ierr = MatSetType(*mat,MATMPISBAIJ);CHKERRQ(ierr);
2776   ierr = MatMPISBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
2777   PetscFunctionReturn(0);
2778 }
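
/*
   Illustrative sketch (not part of the library): one way the i, j, and a arrays could be
   laid out for a 4x4 symmetric tridiagonal matrix split over two processes with bs = 1.
   It is assumed here, as with the other SBAIJ interfaces, that each local row lists only
   its upper-triangular entries; the variable names below are hypothetical.

   Rank 0 owns global rows 0-1 and rank 1 owns rows 2-3 of

       [ 2 -1  0  0 ]
       [-1  2 -1  0 ]
       [ 0 -1  2 -1 ]
       [ 0  0 -1  2 ]

     Mat            A;
     PetscErrorCode ierr;
     PetscMPIInt    rank;

     const PetscInt    i0[] = {0,2,4},  j0[] = {0,1, 1,2};
     const PetscScalar a0[] = {2.0,-1.0, 2.0,-1.0};
     const PetscInt    i1[] = {0,2,3},  j1[] = {2,3, 3};
     const PetscScalar a1[] = {2.0,-1.0, 2.0};

     ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
     if (!rank) {
       ierr = MatCreateMPISBAIJWithArrays(PETSC_COMM_WORLD,1,2,2,4,4,i0,j0,a0,&A);CHKERRQ(ierr);
     } else {
       ierr = MatCreateMPISBAIJWithArrays(PETSC_COMM_WORLD,1,2,2,4,4,i1,j1,a1,&A);CHKERRQ(ierr);
     }
     ierr = MatDestroy(&A);CHKERRQ(ierr);
*/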
2779 
2780 
2781 #undef __FUNCT__
2782 #define __FUNCT__ "MatMPISBAIJSetPreallocationCSR"
2783 /*@C
2784    MatMPISBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in SBAIJ
2785    (symmetric block compressed row) format.
2786 
2787    Collective on MPI_Comm
2788 
2789    Input Parameters:
2790 +  B - the matrix
2791 .  bs - the block size
2792 .  i - the indices into j for the start of each local row (starts with zero)
2793 .  j - the column indices for each local row (starts with zero); these must be sorted within each row
2794 -  v - optional values in the matrix
2795 
2796    Level: developer
2797 
2798 .keywords: matrix, sbaij, symmetric, compressed row, sparse, parallel
2799 
2800 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ
2801 @*/
2802 PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2803 {
2804   PetscErrorCode ierr;
2805 
2806   PetscFunctionBegin;
2807   ierr = PetscTryMethod(B,"MatMPISBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2808   PetscFunctionReturn(0);
2809 }
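
/*
   Illustrative sketch (not part of the library): what rank 0 of the two-process example
   above might pass to preallocate (and fill) its two local rows with bs = 1.  Each process
   supplies the CSR description of its own rows: ii holds the row offsets, jj the global
   column indices sorted within each row, and it is assumed here that only the
   upper-triangular entries of each row are listed.  The variable names are hypothetical.

     Mat               B;
     PetscErrorCode    ierr;
     const PetscInt    ii[] = {0,2,4};
     const PetscInt    jj[] = {0,1, 1,2};
     const PetscScalar vv[] = {2.0,-1.0, 2.0,-1.0};

     ierr = MatCreate(PETSC_COMM_WORLD,&B);CHKERRQ(ierr);
     ierr = MatSetSizes(B,2,2,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
     ierr = MatSetType(B,MATMPISBAIJ);CHKERRQ(ierr);
     ierr = MatMPISBAIJSetPreallocationCSR(B,1,ii,jj,vv);CHKERRQ(ierr);
*/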
2810 
2811 #undef __FUNCT__
2812 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPISBAIJ"
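/*
   Stacks the per-process sequential matrices inmat row-wise into one parallel MPISBAIJ
   matrix *outmat on comm.  n is the number of local block columns of the result (or
   PETSC_DECIDE); MAT_INITIAL_MATRIX triggers the symbolic (preallocation) phase below,
   after which the numeric phase copies the values row by row.
*/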
2813 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
2814 {
2815   PetscErrorCode ierr;
2816   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
2817   PetscInt       *indx;
2818   PetscScalar    *values;
2819 
2820   PetscFunctionBegin;
2821   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
2822   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
2823     Mat_SeqSBAIJ   *a = (Mat_SeqSBAIJ*)inmat->data;
2824     PetscInt       *dnz,*onz,sum,bs,cbs,mbs,Nbs;
2825     PetscInt       *bindx,rmax=a->rmax,j;
2826 
2827     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
2828     mbs = m/bs; Nbs = N/cbs;
2829     if (n == PETSC_DECIDE) {
2830       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
2831     }
2832     /* Check sum(n) = Nbs */
2833     ierr = MPI_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2834     if (sum != Nbs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns != global block columns %D",Nbs);
2835 
2836     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2837     rstart -= mbs;
2838 
2839     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
2840     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
2841     ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
2842     for (i=0; i<mbs; i++) {
2843       ierr = MatGetRow_SeqSBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
2844       nnz = nnz/bs;
2845       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
2846       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
2847       ierr = MatRestoreRow_SeqSBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
2848     }
2849     ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
2850     ierr = PetscFree(bindx);CHKERRQ(ierr);
2851 
2852     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
2853     ierr = MatSetSizes(*outmat,m,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2854     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
2855     ierr = MatSetType(*outmat,MATMPISBAIJ);CHKERRQ(ierr);
2856     ierr = MatMPISBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
2857     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
2858   }
2859 
2860   /* numeric phase */
2861   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
2862   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
2863 
2864   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
2865   for (i=0; i<m; i++) {
2866     ierr = MatGetRow_SeqSBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
2867     Ii   = i + rstart;
2868     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
2869     ierr = MatRestoreRow_SeqSBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
2870   }
2871   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
2872   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2873   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2874   PetscFunctionReturn(0);
2875 }
2876