/* petsc/src/mat/impls/baij/mpi/mpibaij.c (revision e37c518b3c178882b922d1d3faeb3ee252cb498a) */

#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way, since the
   length of colmap equals the number of block columns of the global matrix.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
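
/*
   Illustration (added commentary, not part of the original source): how the
   colmap built above is used later in this file (see MatSetValues_MPIBAIJ()).
   Entries are stored with a +1 offset so that a 0 can mean "this global block
   column has no local counterpart in the off-diagonal part B".  A sketch of
   the lookup for a global scalar column gcol, with bs the block size:

       PetscInt col;
   #if defined(PETSC_USE_CTABLE)
       ierr = PetscTableFind(baij->colmap,gcol/bs + 1,&col);CHKERRQ(ierr);
       col  = col - 1;
   #else
       col = baij->colmap[gcol/bs] - 1;
   #endif
       if (col >= 0) col += gcol % bs;   ...local scalar column within B...
       else          ...block column absent; insertion forces disassembly...
*/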

#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
 \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }
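
/*
   Worked example (added commentary, applies to both macros above): with block
   size bs = 2 (so bs2 = 4), the scalar entry at local (row,col) = (5,7) lives
   in block row brow = 5/2 = 2 and block column bcol = 7/2 = 3, with intra-block
   offsets ridx = 5%2 = 1 and cidx = 7%2 = 1.  Each block is stored column-major,
   so once the block is found at position _i within the row, the value sits at
   ap[bs2*_i + bs*cidx + ridx] = ap[4*_i + 3].
*/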

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some variables required in the macro */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  value = v;
  low = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new global block indexed nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}
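
/*
   Illustration (added commentary, not part of the original source): for bs = 2
   the two traversal orders above place the same block [[a b],[c d]] into the
   column-major block storage bap[] = {a,c,b,d}:

     row-oriented:    v = {a,b,c,d}  ->  bap[0]=a, bap[2]=b, bap[1]=c, bap[3]=d
     column-oriented: v = {a,c,b,d}  ->  copied contiguously into bap[0..3]
*/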

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
/*
    This routine should be optimized so that the block copy at ** Here a copy is required ** below is not needed
    by passing additional stride information into the MatSetValuesBlocked_SeqBAIJ_Inlined() routine
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If n == 1 (row-oriented) or m == 1 (column-oriented) the input block is already contiguous and no copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new blocked indexed nonzero block (%D, %D) into matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
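
/*
   Usage sketch (added commentary; the indices and values below are made up for
   illustration): this routine is normally reached through the public
   MatSetValuesBlocked() interface with block indices and one dense bs*bs
   block of values, e.g. for bs = 2:

     PetscInt    ib = 0, jb = 1;                  block row/column indices
     PetscScalar vals[4] = {1.0,2.0,3.0,4.0};     one 2x2 block, row-oriented by default
     ierr = MatSetValuesBlocked(mat,1,&ib,1,&jb,vals,INSERT_VALUES);CHKERRQ(ierr);
*/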

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
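
/*
   Worked example (added commentary): HASH() is multiplicative hashing with the
   golden-ratio constant.  For size = 11 and key = 7:
     tmp = 7*0.6180339887 = 4.326...,  fractional part = 0.326...,
     h1  = (PetscInt)(11*0.326...) = 3.
   Keys are built as row*Nbs + col + 1, so a table entry of 0 means "empty",
   and collisions are resolved by linear probing (see the searches below).
*/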
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up in the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A HASH table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =mat->rmap->rend,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up in the hash table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}
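
/*
   Usage sketch (added commentary; variable names are made up for illustration):
   as the SETERRQ above indicates, MatGetValues() on a MATMPIBAIJ matrix may
   only request rows owned by the calling process:

     PetscInt    rstart,rend,row,col = 0;
     PetscScalar val;
     ierr = MatGetOwnershipRange(mat,&rstart,&rend);CHKERRQ(ierr);
     row  = rstart;                        any row with rstart <= row < rend
     ierr = MatGetValues(mat,1,&row,1,&col,&val);CHKERRQ(ierr);
*/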

#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}

/*
  Creates the hash table and fills it.
  This table is created only once.
  If new entries need to be added to the matrix,
  the hash table has to be destroyed and
  recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop Over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average Search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the roworiented flag to column oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled; if so, we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode== MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
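
/*
   Illustration (added commentary, not part of the original source): the stash
   loops above are the receiving side of the off-process insertion protocol.
   From the caller's point of view the mechanism is invisible; any rank may set
   entries it does not own, e.g.:

     ierr = MatSetValue(mat,row,col,1.0,ADD_VALUES);CHKERRQ(ierr);   row may be off-process
     ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);  stashes are communicated
     ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);    received values inserted here
*/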

extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto the first processor */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute lengths of each row  */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. Include them for all rows, not just one per block row, since process 0 does not have the
     information needed to reconstruct them for each row from a block row. This does require more communication, but still not more than
     the communication needed for the nonzero values */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIBAIJ"
PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
#else
  ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(baij->garray);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
  ierr = PetscFree(baij->barray);CHKERRQ(ierr);
  ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
  ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_MPIBAIJ"
PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       nt;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
  if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
  ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1406   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1407   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1408   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1409   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1410   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1411   PetscFunctionReturn(0);
1412 }
1413 
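/*
   A hedged sketch of what MatMult_MPIBAIJ computes (editorial, not PETSc
   documentation): with A_d the diagonal block and B_o the off-diagonal
   block, yy = A_d*xx_local + B_o*xx_ghost, and the scatter filling a->lvec
   with the ghost values of xx is overlapped with the local multiply.
   A caller only sees:

     Vec x,y;
     ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/
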
1414 #undef __FUNCT__
1415 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1416 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1417 {
1418   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1419   PetscErrorCode ierr;
1420 
1421   PetscFunctionBegin;
1422   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1423   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1424   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1425   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1426   PetscFunctionReturn(0);
1427 }
1428 
1429 #undef __FUNCT__
1430 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1431 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1432 {
1433   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1434   PetscErrorCode ierr;
1435   PetscBool      merged;
1436 
1437   PetscFunctionBegin;
1438   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1439   /* do nondiagonal part */
1440   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1441   if (!merged) {
1442     /* send it on its way */
1443     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1444     /* do local part */
1445     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1446     /* receive remote parts: note this assumes the values are not actually */
1447     /* inserted in yy until the next line */
1448     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1449   } else {
1450     /* do local part */
1451     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1452     /* send it on its way */
1453     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1454     /* values actually were received in the Begin() but we need to call this nop */
1455     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1456   }
1457   PetscFunctionReturn(0);
1458 }
1459 
1460 #undef __FUNCT__
1461 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1462 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1463 {
1464   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1465   PetscErrorCode ierr;
1466 
1467   PetscFunctionBegin;
1468   /* do nondiagonal part */
1469   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1470   /* send it on its way */
1471   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1472   /* do local part */
1473   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1474   /* receive remote parts: note this assumes the values are not actually */
1475   /* inserted in zz until the next line, which is true for this implementation */
1476   /* but perhaps is not always true. */
1477   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1478   PetscFunctionReturn(0);
1479 }
1480 
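/*
   An editorial sketch of the decomposition used by the two transpose
   routines above: each process computes

       zz_owned  = A_d^T * xx                 (local part)
       zz_owned += contributions of B_o^T*xx  (delivered by the reverse
                                               scatter with ADD_VALUES)

   so the reverse scatter of a->lvec is what accumulates the off-process
   column contributions into their owning ranks.
*/
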
1481 /*
1482   This only works correctly for square matrices where the subblock A->A is the
1483    diagonal block
1484 */
1485 #undef __FUNCT__
1486 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1487 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1488 {
1489   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1490   PetscErrorCode ierr;
1491 
1492   PetscFunctionBegin;
1493   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1494   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1495   PetscFunctionReturn(0);
1496 }
1497 
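/*
   A hedged usage sketch (editorial): since only the square diagonal block
   is consulted, a caller simply does

     Vec d;
     ierr = MatCreateVecs(A,NULL,&d);CHKERRQ(ierr);
     ierr = MatGetDiagonal(A,d);CHKERRQ(ierr);
     ierr = VecDestroy(&d);CHKERRQ(ierr);
*/
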
1498 #undef __FUNCT__
1499 #define __FUNCT__ "MatScale_MPIBAIJ"
1500 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1501 {
1502   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1503   PetscErrorCode ierr;
1504 
1505   PetscFunctionBegin;
1506   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1507   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1508   PetscFunctionReturn(0);
1509 }
1510 
1511 #undef __FUNCT__
1512 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1513 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1514 {
1515   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1516   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1517   PetscErrorCode ierr;
1518   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1519   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1520   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1521 
1522   PetscFunctionBegin;
1523   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1524   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1525   mat->getrowactive = PETSC_TRUE;
1526 
1527   if (!mat->rowvalues && (idx || v)) {
1528     /*
1529         allocate enough space to hold information from the longest row.
1530     */
1531     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1532     PetscInt    max = 1,mbs = mat->mbs,tmp;
1533     for (i=0; i<mbs; i++) {
1534       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1535       if (max < tmp) max = tmp;
1536     }
1537     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1538   }
1539   lrow = row - brstart;
1540 
1541   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1542   if (!v)   {pvA = 0; pvB = 0;}
1543   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1544   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1545   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1546   nztot = nzA + nzB;
1547 
1548   cmap = mat->garray;
1549   if (v  || idx) {
1550     if (nztot) {
1551       /* Sort by increasing column numbers, assuming A and B already sorted */
1552       PetscInt imark = -1;
1553       if (v) {
1554         *v = v_p = mat->rowvalues;
1555         for (i=0; i<nzB; i++) {
1556           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1557           else break;
1558         }
1559         imark = i;
1560         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1561         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1562       }
1563       if (idx) {
1564         *idx = idx_p = mat->rowindices;
1565         if (imark > -1) {
1566           for (i=0; i<imark; i++) {
1567             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1568           }
1569         } else {
1570           for (i=0; i<nzB; i++) {
1571             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1572             else break;
1573           }
1574           imark = i;
1575         }
1576         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1577         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1578       }
1579     } else {
1580       if (idx) *idx = 0;
1581       if (v)   *v   = 0;
1582     }
1583   }
1584   *nz  = nztot;
1585   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1586   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1587   PetscFunctionReturn(0);
1588 }
1589 
1590 #undef __FUNCT__
1591 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1592 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1593 {
1594   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1595 
1596   PetscFunctionBegin;
1597   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1598   baij->getrowactive = PETSC_FALSE;
1599   PetscFunctionReturn(0);
1600 }
1601 
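/*
   A hedged usage sketch (editorial): MatGetRow() is valid only for locally
   owned rows, must be paired with MatRestoreRow(), and only one row may be
   active at a time (see getrowactive above).  Inspect cols[] and vals[]
   between the two calls.

     PetscInt          rstart,rend,row,ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
       ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     }
*/
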
1602 #undef __FUNCT__
1603 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1604 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1605 {
1606   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1607   PetscErrorCode ierr;
1608 
1609   PetscFunctionBegin;
1610   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1611   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 #undef __FUNCT__
1616 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1617 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1618 {
1619   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1620   Mat            A  = a->A,B = a->B;
1621   PetscErrorCode ierr;
1622   PetscReal      isend[5],irecv[5];
1623 
1624   PetscFunctionBegin;
1625   info->block_size = (PetscReal)matin->rmap->bs;
1626 
1627   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1628 
1629   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1630   isend[3] = info->memory;  isend[4] = info->mallocs;
1631 
1632   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1633 
1634   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1635   isend[3] += info->memory;  isend[4] += info->mallocs;
1636 
1637   if (flag == MAT_LOCAL) {
1638     info->nz_used      = isend[0];
1639     info->nz_allocated = isend[1];
1640     info->nz_unneeded  = isend[2];
1641     info->memory       = isend[3];
1642     info->mallocs      = isend[4];
1643   } else if (flag == MAT_GLOBAL_MAX) {
1644     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1645 
1646     info->nz_used      = irecv[0];
1647     info->nz_allocated = irecv[1];
1648     info->nz_unneeded  = irecv[2];
1649     info->memory       = irecv[3];
1650     info->mallocs      = irecv[4];
1651   } else if (flag == MAT_GLOBAL_SUM) {
1652     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1653 
1654     info->nz_used      = irecv[0];
1655     info->nz_allocated = irecv[1];
1656     info->nz_unneeded  = irecv[2];
1657     info->memory       = irecv[3];
1658     info->mallocs      = irecv[4];
1659   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1660   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1661   info->fill_ratio_needed = 0;
1662   info->factor_mallocs    = 0;
1663   PetscFunctionReturn(0);
1664 }
1665 
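/*
   A hedged usage sketch (editorial): the three MatInfoType values handled
   above select local, max-over-ranks, or summed-over-ranks statistics.

     MatInfo info;
     ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"nz used %g allocated %g\n",(double)info.nz_used,(double)info.nz_allocated);CHKERRQ(ierr);
*/
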
1666 #undef __FUNCT__
1667 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1668 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1669 {
1670   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1671   PetscErrorCode ierr;
1672 
1673   PetscFunctionBegin;
1674   switch (op) {
1675   case MAT_NEW_NONZERO_LOCATIONS:
1676   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1677   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1678   case MAT_KEEP_NONZERO_PATTERN:
1679   case MAT_NEW_NONZERO_LOCATION_ERR:
1680     MatCheckPreallocated(A,1);
1681     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1682     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1683     break;
1684   case MAT_ROW_ORIENTED:
1685     MatCheckPreallocated(A,1);
1686     a->roworiented = flg;
1687 
1688     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1689     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1690     break;
1691   case MAT_NEW_DIAGONALS:
1692     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1693     break;
1694   case MAT_IGNORE_OFF_PROC_ENTRIES:
1695     a->donotstash = flg;
1696     break;
1697   case MAT_USE_HASH_TABLE:
1698     a->ht_flag = flg;
1699     break;
1700   case MAT_SYMMETRIC:
1701   case MAT_STRUCTURALLY_SYMMETRIC:
1702   case MAT_HERMITIAN:
1703   case MAT_SYMMETRY_ETERNAL:
1704     MatCheckPreallocated(A,1);
1705     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1706     break;
1707   default:
1708     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1709   }
1710   PetscFunctionReturn(0);
1711 }
1712 
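/*
   A hedged usage sketch (editorial): for example, the hash-table insertion
   path toggled by MAT_USE_HASH_TABLE above is enabled per matrix before
   values are set.

     ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/
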
1713 #undef __FUNCT__
1714 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1715 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1716 {
1717   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1718   Mat_SeqBAIJ    *Aloc;
1719   Mat            B;
1720   PetscErrorCode ierr;
1721   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1722   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1723   MatScalar      *a;
1724 
1725   PetscFunctionBegin;
1726   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1727   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1728     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1729     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1730     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1731     /* Do not know preallocation information, but must set block size */
1732     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1733   } else {
1734     B = *matout;
1735   }
1736 
1737   /* copy over the A part */
1738   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1739   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1740   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1741 
1742   for (i=0; i<mbs; i++) {
1743     rvals[0] = bs*(baij->rstartbs + i);
1744     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1745     for (j=ai[i]; j<ai[i+1]; j++) {
1746       col = (baij->cstartbs+aj[j])*bs;
1747       for (k=0; k<bs; k++) {
1748         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1749 
1750         col++; a += bs;
1751       }
1752     }
1753   }
1754   /* copy over the B part */
1755   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1756   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1757   for (i=0; i<mbs; i++) {
1758     rvals[0] = bs*(baij->rstartbs + i);
1759     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1760     for (j=ai[i]; j<ai[i+1]; j++) {
1761       col = baij->garray[aj[j]]*bs;
1762       for (k=0; k<bs; k++) {
1763         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1764         col++;
1765         a += bs;
1766       }
1767     }
1768   }
1769   ierr = PetscFree(rvals);CHKERRQ(ierr);
1770   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1771   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1772 
1773   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1774   else {
1775     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1776   }
1777   PetscFunctionReturn(0);
1778 }
1779 
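/*
   A hedged usage sketch (editorial): out-of-place and in-place transposes
   as dispatched to the routine above; per the check at its top, the
   in-place form requires a square matrix.

     Mat At;
     ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
     ierr = MatDestroy(&At);CHKERRQ(ierr);
     ierr = MatTranspose(A,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
*/
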
1780 #undef __FUNCT__
1781 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1782 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1783 {
1784   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1785   Mat            a     = baij->A,b = baij->B;
1786   PetscErrorCode ierr;
1787   PetscInt       s1,s2,s3;
1788 
1789   PetscFunctionBegin;
1790   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1791   if (rr) {
1792     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1793     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1794     /* Overlap communication with computation. */
1795     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1796   }
1797   if (ll) {
1798     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1799     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1800     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1801   }
1802   /* scale the diagonal block */
1803   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1804 
1805   if (rr) {
1806     /* Do a scatter end and then right scale the off-diagonal block */
1807     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1808     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1809   }
1810   PetscFunctionReturn(0);
1811 }
1812 
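/*
   An editorial note: the routine above computes mat <- diag(ll)*mat*diag(rr).
   The left scaling of B can proceed immediately, while the right scaling of
   B must wait for the scatter to deliver the ghost entries of rr; a caller
   only sees

     ierr = MatDiagonalScale(A,l,r);CHKERRQ(ierr);
*/
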
1813 #undef __FUNCT__
1814 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1815 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1816 {
1817   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1818   PetscInt      *owners = A->rmap->range;
1819   PetscInt       n      = A->rmap->n;
1820   PetscSF        sf;
1821   PetscInt      *lrows;
1822   PetscSFNode   *rrows;
1823   PetscInt       r, p = 0, len = 0;
1824   PetscErrorCode ierr;
1825 
1826   PetscFunctionBegin;
1827   /* Create SF where leaves are input rows and roots are owned rows */
1828   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1829   for (r = 0; r < n; ++r) lrows[r] = -1;
1830   if (!A->nooffproczerorows) {ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);}
1831   for (r = 0; r < N; ++r) {
1832     const PetscInt idx   = rows[r];
1833     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1834     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1835       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1836     }
1837     if (A->nooffproczerorows) {
1838       if (p != l->rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"MAT_NO_OFF_PROC_ZERO_ROWS set, but row %D is not owned by rank %d",idx,l->rank);
1839       lrows[len++] = idx - owners[p];
1840     } else {
1841       rrows[r].rank = p;
1842       rrows[r].index = rows[r] - owners[p];
1843     }
1844   }
1845   if (!A->nooffproczerorows) {
1846     ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1847     ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1848     /* Collect flags for rows to be zeroed */
1849     ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1850     ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1851     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1852     /* Compress and put in row numbers */
1853     for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1854   }
1855   /* fix right hand side if needed */
1856   if (x && b) {
1857     const PetscScalar *xx;
1858     PetscScalar       *bb;
1859 
1860     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1861     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1862     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1863     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1864     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1865   }
1866 
1867   /* actually zap the local rows */
1868   /*
1869         Zero the required rows. If the "diagonal block" of the matrix
1870      is square and the user wishes to set the diagonal, we use separate
1871      code so that MatSetValues() is not called for each diagonal entry,
1872      which would allocate new memory each time, triggering many mallocs
1873      and slowing things down.
1874   */
1875   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1876   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1877   if (A->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
1878     PetscBool cong;
1879     ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
1880     if (cong) A->congruentlayouts = 1;
1881     else      A->congruentlayouts = 0;
1882   }
1883   if ((diag != 0.0) && A->congruentlayouts) {
1884     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1885   } else if (diag != 0.0) {
1886     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1887     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1888        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1889     for (r = 0; r < len; ++r) {
1890       const PetscInt row = lrows[r] + A->rmap->rstart;
1891       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1892     }
1893     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1894     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1895   } else {
1896     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1897   }
1898   ierr = PetscFree(lrows);CHKERRQ(ierr);
1899 
1900   /* only change matrix nonzero state if pattern was allowed to be changed */
1901   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1902     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1903     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1904   }
1905   PetscFunctionReturn(0);
1906 }
1907 
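/*
   A hedged usage sketch (editorial): a typical Dirichlet boundary-condition
   application.  The two global row indices are made up, and x and b may be
   NULL when no right-hand-side fix-up is wanted.

     PetscInt rows[2];
     rows[0] = 0; rows[1] = 5;
     ierr = MatZeroRows(A,2,rows,1.0,x,b);CHKERRQ(ierr);
*/
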
1908 #undef __FUNCT__
1909 #define __FUNCT__ "MatZeroRowsColumns_MPIBAIJ"
1910 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1911 {
1912   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1913   PetscErrorCode    ierr;
1914   PetscInt          n = A->rmap->n;
1915   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1916   PetscInt          *lrows,*owners = A->rmap->range;
1917   PetscSFNode       *rrows;
1918   PetscSF           sf;
1919   const PetscScalar *xx;
1920   PetscScalar       *bb,*mask;
1921   Vec               xmask,lmask;
1922   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1923   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1924   PetscScalar       *aa;
1925 
1926   PetscFunctionBegin;
1927   /* Create SF where leaves are input rows and roots are owned rows */
1928   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1929   for (r = 0; r < n; ++r) lrows[r] = -1;
1930   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1931   for (r = 0; r < N; ++r) {
1932     const PetscInt idx   = rows[r];
1933     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1934     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1935       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1936     }
1937     rrows[r].rank  = p;
1938     rrows[r].index = rows[r] - owners[p];
1939   }
1940   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1941   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1942   /* Collect flags for rows to be zeroed */
1943   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1944   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1945   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1946   /* Compress and put in row numbers */
1947   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1948   /* zero diagonal part of matrix */
1949   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1950   /* handle off diagonal part of matrix */
1951   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1952   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1953   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1954   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1955   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1956   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1957   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1958   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1959   if (x) {
1960     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1961     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1962     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1963     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1964   }
1965   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1966   /* remove zeroed rows of off diagonal matrix */
1967   for (i = 0; i < len; ++i) {
1968     row   = lrows[i];
1969     count = (baij->i[row/bs +1] - baij->i[row/bs])*bs;
1970     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1971     for (k = 0; k < count; ++k) {
1972       aa[0] = 0.0;
1973       aa   += bs;
1974     }
1975   }
1976   /* loop over all elements of off process part of matrix zeroing removed columns*/
1977   for (i = 0; i < l->B->rmap->N; ++i) {
1978     row = i/bs;
1979     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1980       for (k = 0; k < bs; ++k) {
1981         col = bs*baij->j[j] + k;
1982         if (PetscAbsScalar(mask[col])) {
1983           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1984           if (x) bb[i] -= aa[0]*xx[col];
1985           aa[0] = 0.0;
1986         }
1987       }
1988     }
1989   }
1990   if (x) {
1991     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1992     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1993   }
1994   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1995   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1996   ierr = PetscFree(lrows);CHKERRQ(ierr);
1997 
1998   /* only change matrix nonzero state if pattern was allowed to be changed */
1999   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
2000     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
2001     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2002   }
2003   PetscFunctionReturn(0);
2004 }
2005 
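/*
   An editorial note: unlike MatZeroRows(), the routine above also
   eliminates the matching columns symmetrically, folding the known values
   into b (the bb[i] -= aa[0]*xx[col] update above) before zeroing.  With
   nrows and rows[] as placeholders, a caller does

     ierr = MatZeroRowsColumns(A,nrows,rows,1.0,x,b);CHKERRQ(ierr);
*/
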
2006 #undef __FUNCT__
2007 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
2008 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
2009 {
2010   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2011   PetscErrorCode ierr;
2012 
2013   PetscFunctionBegin;
2014   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
2015   PetscFunctionReturn(0);
2016 }
2017 
2018 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
2019 
2020 #undef __FUNCT__
2021 #define __FUNCT__ "MatEqual_MPIBAIJ"
2022 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
2023 {
2024   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
2025   Mat            a,b,c,d;
2026   PetscBool      flg;
2027   PetscErrorCode ierr;
2028 
2029   PetscFunctionBegin;
2030   a = matA->A; b = matA->B;
2031   c = matB->A; d = matB->B;
2032 
2033   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2034   if (flg) {
2035     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2036   }
2037   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2038   PetscFunctionReturn(0);
2039 }
2040 
2041 #undef __FUNCT__
2042 #define __FUNCT__ "MatCopy_MPIBAIJ"
2043 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
2044 {
2045   PetscErrorCode ierr;
2046   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2047   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2048 
2049   PetscFunctionBegin;
2050   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
2051   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2052     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2053   } else {
2054     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2055     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2056   }
2057   PetscFunctionReturn(0);
2058 }
2059 
2060 #undef __FUNCT__
2061 #define __FUNCT__ "MatSetUp_MPIBAIJ"
2062 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
2063 {
2064   PetscErrorCode ierr;
2065 
2066   PetscFunctionBegin;
2067   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
2068   PetscFunctionReturn(0);
2069 }
2070 
2071 #undef __FUNCT__
2072 #define __FUNCT__ "MatAXPYGetPreallocation_MPIBAIJ"
2073 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
2074 {
2075   PetscErrorCode ierr;
2076   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
2077   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
2078   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
2079 
2080   PetscFunctionBegin;
2081   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
2082   PetscFunctionReturn(0);
2083 }
2084 
2085 #undef __FUNCT__
2086 #define __FUNCT__ "MatAXPY_MPIBAIJ"
2087 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2088 {
2089   PetscErrorCode ierr;
2090   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
2091   PetscBLASInt   bnz,one=1;
2092   Mat_SeqBAIJ    *x,*y;
2093 
2094   PetscFunctionBegin;
2095   if (str == SAME_NONZERO_PATTERN) {
2096     PetscScalar alpha = a;
2097     x    = (Mat_SeqBAIJ*)xx->A->data;
2098     y    = (Mat_SeqBAIJ*)yy->A->data;
2099     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2100     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2101     x    = (Mat_SeqBAIJ*)xx->B->data;
2102     y    = (Mat_SeqBAIJ*)yy->B->data;
2103     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2104     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2105     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2106   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2107     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2108   } else {
2109     Mat      B;
2110     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2111     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2112     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2113     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2114     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2115     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2116     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2117     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2118     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2119     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2120     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2121     /* MatAXPY_BasicWithPreallocation() for a BAIJ matrix is much slower than for AIJ, even for bs=1! */
2122     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2123     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2124     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2125     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2126   }
2127   PetscFunctionReturn(0);
2128 }
2129 
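/*
   A hedged usage sketch (editorial): Y <- Y + a*X.  With
   SAME_NONZERO_PATTERN the BLAS axpy fast path above is taken; otherwise a
   new preallocation is computed and the header of Y replaced.

     ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
*/
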
2130 #undef __FUNCT__
2131 #define __FUNCT__ "MatRealPart_MPIBAIJ"
2132 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2133 {
2134   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2135   PetscErrorCode ierr;
2136 
2137   PetscFunctionBegin;
2138   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2139   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2140   PetscFunctionReturn(0);
2141 }
2142 
2143 #undef __FUNCT__
2144 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2145 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2146 {
2147   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2148   PetscErrorCode ierr;
2149 
2150   PetscFunctionBegin;
2151   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2152   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2153   PetscFunctionReturn(0);
2154 }
2155 
2156 #undef __FUNCT__
2157 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2158 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2159 {
2160   PetscErrorCode ierr;
2161   IS             iscol_local;
2162   PetscInt       csize;
2163 
2164   PetscFunctionBegin;
2165   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2166   if (call == MAT_REUSE_MATRIX) {
2167     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2168     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2169   } else {
2170     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2171   }
2172   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2173   if (call == MAT_INITIAL_MATRIX) {
2174     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2175     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2176   }
2177   PetscFunctionReturn(0);
2178 }
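
/*
   A hedged usage sketch (editorial): extracting the locally owned rows and
   all columns.  For BAIJ the index sets are assumed block-aligned, and a
   second call may pass MAT_REUSE_MATRIX to reuse the cached "ISAllGather"
   object composed above.

     IS       isrow,iscol;
     Mat      S;
     PetscInt rstart,rend,cstart,cend;
     ierr = MatGetOwnershipRange(mat,&rstart,&rend);CHKERRQ(ierr);
     ierr = MatGetOwnershipRangeColumn(mat,&cstart,&cend);CHKERRQ(ierr);
     ierr = ISCreateStride(PetscObjectComm((PetscObject)mat),rend-rstart,rstart,1,&isrow);CHKERRQ(ierr);
     ierr = ISCreateStride(PetscObjectComm((PetscObject)mat),cend-cstart,cstart,1,&iscol);CHKERRQ(ierr);
     ierr = MatGetSubMatrix(mat,isrow,iscol,MAT_INITIAL_MATRIX,&S);CHKERRQ(ierr);
     ierr = ISDestroy(&isrow);CHKERRQ(ierr);
     ierr = ISDestroy(&iscol);CHKERRQ(ierr);
     ierr = MatDestroy(&S);CHKERRQ(ierr);
*/
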
2179 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2180 #undef __FUNCT__
2181 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2182 /*
2183   Not great since it makes two copies of the submatrix: first a local SeqBAIJ,
2184   and then the end result formed by concatenating the local matrices.
2185   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2186   This routine is used for BAIJ and SBAIJ matrices (unfortunate dependency).
2187 */
2188 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2189 {
2190   PetscErrorCode ierr;
2191   PetscMPIInt    rank,size;
2192   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2193   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2194   Mat            M,Mreuse;
2195   MatScalar      *vwork,*aa;
2196   MPI_Comm       comm;
2197   IS             isrow_new, iscol_new;
2198   PetscBool      idflag,allrows, allcols;
2199   Mat_SeqBAIJ    *aij;
2200 
2201   PetscFunctionBegin;
2202   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2203   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2204   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2205   /* The compression and expansion should be avoided. It does not flag
2206      errors and may change the indices, hence is buggy */
2207   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2208   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2209 
2210   /* Check for special case: each processor gets entire matrix columns */
2211   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2212   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2213   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2214   else allcols = PETSC_FALSE;
2215 
2216   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2217   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2218   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2219   else allrows = PETSC_FALSE;
2220 
2221   if (call ==  MAT_REUSE_MATRIX) {
2222     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2223     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2224     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2225   } else {
2226     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2227   }
2228   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2229   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2230   /*
2231       m - number of local rows
2232       n - number of columns (same on all processors)
2233       rstart - first row in new global matrix generated
2234   */
2235   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2236   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2237   m    = m/bs;
2238   n    = n/bs;
2239 
2240   if (call == MAT_INITIAL_MATRIX) {
2241     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2242     ii  = aij->i;
2243     jj  = aij->j;
2244 
2245     /*
2246         Determine the number of non-zeros in the diagonal and off-diagonal
2247         portions of the matrix in order to do correct preallocation
2248     */
2249 
2250     /* first get start and end of "diagonal" columns */
2251     if (csize == PETSC_DECIDE) {
2252       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2253       if (mglobal == n*bs) { /* square matrix */
2254         nlocal = m;
2255       } else {
2256         nlocal = n/size + ((n % size) > rank);
2257       }
2258     } else {
2259       nlocal = csize/bs;
2260     }
2261     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2262     rstart = rend - nlocal;
2263     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2264 
2265     /* next, compute all the lengths */
2266     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2267     for (i=0; i<m; i++) {
2268       jend = ii[i+1] - ii[i];
2269       olen = 0;
2270       dlen = 0;
2271       for (j=0; j<jend; j++) {
2272         if (*jj < rstart || *jj >= rend) olen++;
2273         else dlen++;
2274         jj++;
2275       }
2276       olens[i] = olen;
2277       dlens[i] = dlen;
2278     }
2279     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2280     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2281     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2282     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2283     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2284     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2285   } else {
2286     PetscInt ml,nl;
2287 
2288     M    = *newmat;
2289     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2290     if (ml != m*bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2291     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2292     /*
2293          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2294        rather than the slower MatSetValues().
2295     */
2296     M->was_assembled = PETSC_TRUE;
2297     M->assembled     = PETSC_FALSE;
2298   }
2299   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2300   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2301   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2302   ii   = aij->i;
2303   jj   = aij->j;
2304   aa   = aij->a;
2305   for (i=0; i<m; i++) {
2306     row   = rstart/bs + i;
2307     nz    = ii[i+1] - ii[i];
2308     cwork = jj;     jj += nz;
2309     vwork = aa;     aa += nz*bs*bs;
2310     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2311   }
2312 
2313   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2314   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2315   *newmat = M;
2316 
2317   /* save submatrix used in processor for next request */
2318   if (call ==  MAT_INITIAL_MATRIX) {
2319     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2320     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2321   }
2322   PetscFunctionReturn(0);
2323 }
2324 
2325 #undef __FUNCT__
2326 #define __FUNCT__ "MatPermute_MPIBAIJ"
2327 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2328 {
2329   MPI_Comm       comm,pcomm;
2330   PetscInt       clocal_size,nrows;
2331   const PetscInt *rows;
2332   PetscMPIInt    size;
2333   IS             crowp,lcolp;
2334   PetscErrorCode ierr;
2335 
2336   PetscFunctionBegin;
2337   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2338   /* make a collective version of 'rowp' */
2339   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2340   if (pcomm==comm) {
2341     crowp = rowp;
2342   } else {
2343     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2344     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2345     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2346     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2347   }
2348   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2349   /* make a local version of 'colp' */
2350   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2351   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2352   if (size==1) {
2353     lcolp = colp;
2354   } else {
2355     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2356   }
2357   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2358   /* now we just get the submatrix */
2359   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2360   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2361   /* clean up */
2362   if (pcomm!=comm) {
2363     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2364   }
2365   if (size>1) {
2366     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2367   }
2368   PetscFunctionReturn(0);
2369 }
2370 
2371 #undef __FUNCT__
2372 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2373 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2374 {
2375   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2376   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2377 
2378   PetscFunctionBegin;
2379   if (nghosts) *nghosts = B->nbs;
2380   if (ghosts) *ghosts = baij->garray;
2381   PetscFunctionReturn(0);
2382 }
2383 
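/*
   A hedged usage sketch (editorial): for BAIJ the ghosts returned are block
   column indices (baij->garray); the array belongs to the matrix and must
   not be freed by the caller.

     PetscInt       nghost;
     const PetscInt *ghost;
     ierr = MatGetGhosts(A,&nghost,&ghost);CHKERRQ(ierr);
*/
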
2384 #undef __FUNCT__
2385 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2386 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2387 {
2388   Mat            B;
2389   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2390   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2391   Mat_SeqAIJ     *b;
2392   PetscErrorCode ierr;
2393   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2394   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2395   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2396 
2397   PetscFunctionBegin;
2398   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2399   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2400 
2401   /* ----------------------------------------------------------------
2402      Tell every processor the number of nonzeros per row
2403   */
2404   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2405   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2406     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2407   }
2408   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2409   displs    = recvcounts + size;
2410   for (i=0; i<size; i++) {
2411     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2412     displs[i]     = A->rmap->range[i]/bs;
2413   }
2414 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2415   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2416 #else
2417   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2418   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2419 #endif
2420   /* ---------------------------------------------------------------
2421      Create the sequential matrix of the same type as the local block diagonal
2422   */
2423   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2424   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2425   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2426   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2427   b    = (Mat_SeqAIJ*)B->data;
2428 
2429   /*--------------------------------------------------------------------
2430     Copy my part of matrix column indices over
2431   */
2432   sendcount  = ad->nz + bd->nz;
2433   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2434   a_jsendbuf = ad->j;
2435   b_jsendbuf = bd->j;
2436   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2437   cnt        = 0;
2438   for (i=0; i<n; i++) {
2439 
2440     /* put in lower diagonal portion */
2441     m = bd->i[i+1] - bd->i[i];
2442     while (m > 0) {
2443       /* is it above diagonal (in bd (compressed) numbering) */
2444       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2445       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2446       m--;
2447     }
2448 
2449     /* put in diagonal portion */
2450     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2451       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2452     }
2453 
2454     /* put in upper diagonal portion */
2455     while (m-- > 0) {
2456       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2457     }
2458   }
2459   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2460 
2461   /*--------------------------------------------------------------------
2462     Gather all column indices to all processors
2463   */
2464   for (i=0; i<size; i++) {
2465     recvcounts[i] = 0;
2466     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2467       recvcounts[i] += lens[j];
2468     }
2469   }
2470   displs[0] = 0;
2471   for (i=1; i<size; i++) {
2472     displs[i] = displs[i-1] + recvcounts[i-1];
2473   }
2474 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2475   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2476 #else
2477   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2478 #endif
2479   /*--------------------------------------------------------------------
2480     Assemble the matrix into usable form (note numerical values not yet set)
2481   */
2482   /* set the b->ilen (length of each row) values */
2483   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2484   /* set the b->i indices */
2485   b->i[0] = 0;
2486   for (i=1; i<=A->rmap->N/bs; i++) {
2487     b->i[i] = b->i[i-1] + lens[i-1];
2488   }
2489   ierr = PetscFree(lens);CHKERRQ(ierr);
2490   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2491   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2492   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2493 
2494   if (A->symmetric) {
2495     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2496   } else if (A->hermitian) {
2497     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2498   } else if (A->structurally_symmetric) {
2499     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2500   }
2501   *newmat = B;
2502   PetscFunctionReturn(0);
2503 }
2504 
2505 #undef __FUNCT__
2506 #define __FUNCT__ "MatSOR_MPIBAIJ"
2507 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2508 {
2509   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2510   PetscErrorCode ierr;
2511   Vec            bb1 = 0;
2512 
2513   PetscFunctionBegin;
2514   if (flag == SOR_APPLY_UPPER) {
2515     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2516     PetscFunctionReturn(0);
2517   }
2518 
2519   if (its > 1 || (~flag & SOR_ZERO_INITIAL_GUESS)) {
2520     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2521   }
2522 
2523   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2524     if (flag & SOR_ZERO_INITIAL_GUESS) {
2525       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2526       its--;
2527     }
2528 
2529     while (its--) {
2530       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2531       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2532 
2533       /* update rhs: bb1 = bb - B*x */
2534       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2535       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2536 
2537       /* local sweep */
2538       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2539     }
2540   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2541     if (flag & SOR_ZERO_INITIAL_GUESS) {
2542       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2543       its--;
2544     }
2545     while (its--) {
2546       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2547       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2548 
2549       /* update rhs: bb1 = bb - B*x */
2550       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2551       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2552 
2553       /* local sweep */
2554       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2555     }
2556   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2557     if (flag & SOR_ZERO_INITIAL_GUESS) {
2558       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2559       its--;
2560     }
2561     while (its--) {
2562       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2563       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2564 
2565       /* update rhs: bb1 = bb - B*x */
2566       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2567       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2568 
2569       /* local sweep */
2570       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2571     }
2572   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Requested parallel SOR variant is not supported");
2573 
2574   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2575   PetscFunctionReturn(0);
2576 }
2577 
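/*
   An editorial sketch of the iteration performed above: each outer "its"
   step applies processor-local SOR to a right-hand side updated with the
   latest ghost values,

       bb1 = bb - B_o * x_ghost
       x   = SOR(A_d, bb1, omega, fshift, lits sweeps)

   which is block Jacobi across processes with SOR as the local solver.
*/
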
2578 #undef __FUNCT__
2579 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2580 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2581 {
2582   PetscErrorCode ierr;
2583   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2584   PetscInt       N,i,*garray = aij->garray;
2585   PetscInt       ib,jb,bs = A->rmap->bs;
2586   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2587   MatScalar      *a_val = a_aij->a;
2588   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2589   MatScalar      *b_val = b_aij->a;
2590   PetscReal      *work;
2591 
2592   PetscFunctionBegin;
2593   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2594   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2595   if (type == NORM_2) {
2596     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2597       for (jb=0; jb<bs; jb++) {
2598         for (ib=0; ib<bs; ib++) {
2599           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2600           a_val++;
2601         }
2602       }
2603     }
2604     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2605       for (jb=0; jb<bs; jb++) {
2606         for (ib=0; ib<bs; ib++) {
2607           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2608           b_val++;
2609         }
2610       }
2611     }
2612   } else if (type == NORM_1) {
2613     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2614       for (jb=0; jb<bs; jb++) {
2615         for (ib=0; ib<bs; ib++) {
2616           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2617           a_val++;
2618         }
2619       }
2620     }
2621     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2622       for (jb=0; jb<bs; jb++) {
2623         for (ib=0; ib<bs; ib++) {
2624           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2625           b_val++;
2626         }
2627       }
2628     }
2629   } else if (type == NORM_INFINITY) {
2630     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2631       for (jb=0; jb<bs; jb++) {
2632         for (ib=0; ib<bs; ib++) {
2633           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2634           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2635           a_val++;
2636         }
2637       }
2638     }
2639     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2640       for (jb=0; jb<bs; jb++) {
2641         for (ib=0; ib<bs; ib++) {
2642           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2643           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2644           b_val++;
2645         }
2646       }
2647     }
2648   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2649   if (type == NORM_INFINITY) {
2650     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2651   } else {
2652     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2653   }
2654   ierr = PetscFree(work);CHKERRQ(ierr);
2655   if (type == NORM_2) {
2656     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2657   }
2658   PetscFunctionReturn(0);
2659 }
2660 
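/*
   A hedged usage sketch (editorial): norms must hold one entry per global
   column.

     PetscReal *norms;
     PetscInt  N;
     ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
     ierr = PetscMalloc1(N,&norms);CHKERRQ(ierr);
     ierr = MatGetColumnNorms(A,NORM_2,norms);CHKERRQ(ierr);
     ierr = PetscFree(norms);CHKERRQ(ierr);
*/
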
2661 #undef __FUNCT__
2662 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2663 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2664 {
2665   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2666   PetscErrorCode ierr;
2667 
2668   PetscFunctionBegin;
2669   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2670   A->errortype = a->A->errortype;
2671   PetscFunctionReturn(0);
2672 }
2673 
2674 #undef __FUNCT__
2675 #define __FUNCT__ "MatShift_MPIBAIJ"
2676 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2677 {
2678   PetscErrorCode ierr;
2679   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2680   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2681 
2682   PetscFunctionBegin;
2683   if (!Y->preallocated) {
2684     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2685   } else if (!aij->nz) {
2686     PetscInt nonew = aij->nonew;
2687     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2688     aij->nonew = nonew;
2689   }
2690   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2691   PetscFunctionReturn(0);
2692 }
2693 
2694 #undef __FUNCT__
2695 #define __FUNCT__ "MatMissingDiagonal_MPIBAIJ"
2696 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2697 {
2698   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2699   PetscErrorCode ierr;
2700 
2701   PetscFunctionBegin;
2702   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2703   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2704   if (d) {
2705     PetscInt rstart;
2706     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2707     *d += rstart/A->rmap->bs;
2709   }
2710   PetscFunctionReturn(0);
2711 }
2712 
2713 /* -------------------------------------------------------------------*/
2714 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2715                                        MatGetRow_MPIBAIJ,
2716                                        MatRestoreRow_MPIBAIJ,
2717                                        MatMult_MPIBAIJ,
2718                                 /* 4*/ MatMultAdd_MPIBAIJ,
2719                                        MatMultTranspose_MPIBAIJ,
2720                                        MatMultTransposeAdd_MPIBAIJ,
2721                                        0,
2722                                        0,
2723                                        0,
2724                                 /*10*/ 0,
2725                                        0,
2726                                        0,
2727                                        MatSOR_MPIBAIJ,
2728                                        MatTranspose_MPIBAIJ,
2729                                 /*15*/ MatGetInfo_MPIBAIJ,
2730                                        MatEqual_MPIBAIJ,
2731                                        MatGetDiagonal_MPIBAIJ,
2732                                        MatDiagonalScale_MPIBAIJ,
2733                                        MatNorm_MPIBAIJ,
2734                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2735                                        MatAssemblyEnd_MPIBAIJ,
2736                                        MatSetOption_MPIBAIJ,
2737                                        MatZeroEntries_MPIBAIJ,
2738                                 /*24*/ MatZeroRows_MPIBAIJ,
2739                                        0,
2740                                        0,
2741                                        0,
2742                                        0,
2743                                 /*29*/ MatSetUp_MPIBAIJ,
2744                                        0,
2745                                        0,
2746                                        0,
2747                                        0,
2748                                 /*34*/ MatDuplicate_MPIBAIJ,
2749                                        0,
2750                                        0,
2751                                        0,
2752                                        0,
2753                                 /*39*/ MatAXPY_MPIBAIJ,
2754                                        MatGetSubMatrices_MPIBAIJ,
2755                                        MatIncreaseOverlap_MPIBAIJ,
2756                                        MatGetValues_MPIBAIJ,
2757                                        MatCopy_MPIBAIJ,
2758                                 /*44*/ 0,
2759                                        MatScale_MPIBAIJ,
2760                                        MatShift_MPIBAIJ,
2761                                        0,
2762                                        MatZeroRowsColumns_MPIBAIJ,
2763                                 /*49*/ 0,
2764                                        0,
2765                                        0,
2766                                        0,
2767                                        0,
2768                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2769                                        0,
2770                                        MatSetUnfactored_MPIBAIJ,
2771                                        MatPermute_MPIBAIJ,
2772                                        MatSetValuesBlocked_MPIBAIJ,
2773                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2774                                        MatDestroy_MPIBAIJ,
2775                                        MatView_MPIBAIJ,
2776                                        0,
2777                                        0,
2778                                 /*64*/ 0,
2779                                        0,
2780                                        0,
2781                                        0,
2782                                        0,
2783                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2784                                        0,
2785                                        0,
2786                                        0,
2787                                        0,
2788                                 /*74*/ 0,
2789                                        MatFDColoringApply_BAIJ,
2790                                        0,
2791                                        0,
2792                                        0,
2793                                 /*79*/ 0,
2794                                        0,
2795                                        0,
2796                                        0,
2797                                        MatLoad_MPIBAIJ,
2798                                 /*84*/ 0,
2799                                        0,
2800                                        0,
2801                                        0,
2802                                        0,
2803                                 /*89*/ 0,
2804                                        0,
2805                                        0,
2806                                        0,
2807                                        0,
2808                                 /*94*/ 0,
2809                                        0,
2810                                        0,
2811                                        0,
2812                                        0,
2813                                 /*99*/ 0,
2814                                        0,
2815                                        0,
2816                                        0,
2817                                        0,
2818                                 /*104*/0,
2819                                        MatRealPart_MPIBAIJ,
2820                                        MatImaginaryPart_MPIBAIJ,
2821                                        0,
2822                                        0,
2823                                 /*109*/0,
2824                                        0,
2825                                        0,
2826                                        0,
2827                                        MatMissingDiagonal_MPIBAIJ,
2828                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2829                                        0,
2830                                        MatGetGhosts_MPIBAIJ,
2831                                        0,
2832                                        0,
2833                                 /*119*/0,
2834                                        0,
2835                                        0,
2836                                        0,
2837                                        MatGetMultiProcBlock_MPIBAIJ,
2838                                 /*124*/0,
2839                                        MatGetColumnNorms_MPIBAIJ,
2840                                        MatInvertBlockDiagonal_MPIBAIJ,
2841                                        0,
2842                                        0,
2843                                /*129*/ 0,
2844                                        0,
2845                                        0,
2846                                        0,
2847                                        0,
2848                                /*134*/ 0,
2849                                        0,
2850                                        0,
2851                                        0,
2852                                        0,
2853                                /*139*/ 0,
2854                                        0,
2855                                        0,
2856                                        MatFDColoringSetUp_MPIXAIJ,
2857                                        0,
2858                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2859 };
2860 
2861 #undef __FUNCT__
2862 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2863 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2864 {
2865   PetscFunctionBegin;
2866   *a = ((Mat_MPIBAIJ*)A->data)->A;
2867   PetscFunctionReturn(0);
2868 }
2869 
2870 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2871 
2872 #undef __FUNCT__
2873 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2874 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2875 {
2876   PetscInt       m,rstart,cstart,cend;
2877   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2878   const PetscInt *JJ    =0;
2879   PetscScalar    *values=0;
2880   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2881   PetscErrorCode ierr;
2882 
2883   PetscFunctionBegin;
2884   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2885   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2886   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2887   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2888   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2889   m      = B->rmap->n/bs;
2890   rstart = B->rmap->rstart/bs;
2891   cstart = B->cmap->rstart/bs;
2892   cend   = B->cmap->rend/bs;
2893 
2894   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2895   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
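  /* count, for each local block row, the blocks whose global block column falls in the
     diagonal block-column range [cstart,cend) (d_nnz) and those that fall outside it (o_nnz) */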
2896   for (i=0; i<m; i++) {
2897     nz = ii[i+1] - ii[i];
2898     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2899     nz_max = PetscMax(nz_max,nz);
2900     JJ     = jj + ii[i];
2901     for (j=0; j<nz; j++) {
2902       if (*JJ >= cstart) break;
2903       JJ++;
2904     }
2905     d = 0;
2906     for (; j<nz; j++) {
2907       if (*JJ++ >= cend) break;
2908       d++;
2909     }
2910     d_nnz[i] = d;
2911     o_nnz[i] = nz - d;
2912   }
2913   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2914   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2915 
2916   values = (PetscScalar*)V;
2917   if (!values) {
2918     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2919     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2920   }
2921   for (i=0; i<m; i++) {
2922     PetscInt          row    = i + rstart;
2923     PetscInt          ncols  = ii[i+1] - ii[i];
2924     const PetscInt    *icols = jj + ii[i];
2925     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2926       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2927       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2928     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2929       PetscInt j;
2930       for (j=0; j<ncols; j++) {
2931         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2932         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2933       }
2934     }
2935   }
2936 
2937   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2938   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2939   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2940   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2941   PetscFunctionReturn(0);
2942 }
2943 
2944 #undef __FUNCT__
2945 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2946 /*@C
2947    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2948    (block compressed row).
2949 
2950    Collective on MPI_Comm
2951 
2952    Input Parameters:
2953 +  B - the matrix
2954 .  bs - the block size
2955 .  i - the indices into j for the start of each local row (starts with zero)
2956 .  j - the column indices for each local row (starts with zero); these must be sorted for each row
2957 -  v - optional values in the matrix
2958 
2959    Level: developer
2960 
2961    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2962    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
2963    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
2964    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
2965    block and the second index is over columns within a block.
2966 
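   Example (a minimal sketch, not taken from the PETSc test suite; B, ierr and the
   2x2-block CSR arrays below are made up for illustration): block row 0 holds blocks
   with block columns 0 and 1, block row 1 holds a single block with block column 1,
   and v supplies bs*bs values per block (fill it before the call, or pass NULL).

.vb
   PetscInt    bs  = 2;
   PetscInt    i[] = {0,2,3};
   PetscInt    j[] = {0,1,1};
   PetscScalar v[12];

   ierr = MatMPIBAIJSetPreallocationCSR(B,bs,i,j,v);CHKERRQ(ierr);
.ve
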
2967 .keywords: matrix, aij, compressed row, sparse, parallel
2968 
2969 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ, MatCreateMPIBAIJWithArrays(), MPIBAIJ
2970 @*/
2971 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2972 {
2973   PetscErrorCode ierr;
2974 
2975   PetscFunctionBegin;
2976   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2977   PetscValidType(B,1);
2978   PetscValidLogicalCollectiveInt(B,bs,2);
2979   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2980   PetscFunctionReturn(0);
2981 }
2982 
2983 #undef __FUNCT__
2984 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2985 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2986 {
2987   Mat_MPIBAIJ    *b;
2988   PetscErrorCode ierr;
2989   PetscInt       i;
2990 
2991   PetscFunctionBegin;
2992   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2993   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2994   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2995   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2996 
2997   if (d_nnz) {
2998     for (i=0; i<B->rmap->n/bs; i++) {
2999       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
3000     }
3001   }
3002   if (o_nnz) {
3003     for (i=0; i<B->rmap->n/bs; i++) {
3004       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
3005     }
3006   }
3007 
3008   b      = (Mat_MPIBAIJ*)B->data;
3009   b->bs2 = bs*bs;
3010   b->mbs = B->rmap->n/bs;
3011   b->nbs = B->cmap->n/bs;
3012   b->Mbs = B->rmap->N/bs;
3013   b->Nbs = B->cmap->N/bs;
3014 
3015   for (i=0; i<=b->size; i++) {
3016     b->rangebs[i] = B->rmap->range[i]/bs;
3017   }
3018   b->rstartbs = B->rmap->rstart/bs;
3019   b->rendbs   = B->rmap->rend/bs;
3020   b->cstartbs = B->cmap->rstart/bs;
3021   b->cendbs   = B->cmap->rend/bs;
3022 
3023   if (!B->preallocated) {
3024     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3025     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3026     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3027     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
3028     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3029     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3030     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3031     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3032     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3033   }
3034 
3035   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3036   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3037   B->preallocated = PETSC_TRUE;
3038   PetscFunctionReturn(0);
3039 }
3040 
3041 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3042 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3043 
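/*
   MatConvert_MPIBAIJ_MPIAdj - builds the block adjacency graph of the matrix:
   one vertex per block row, with an edge for every nonzero block column except
   the diagonal block, which MPIADJ does not store.  Off-diagonal block columns
   are translated to global indices through garray and merged with the diagonal
   part so that each row of the result is sorted.
*/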
3044 #undef __FUNCT__
3045 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3046 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3047 {
3048   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3049   PetscErrorCode ierr;
3050   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3051   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3052   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3053 
3054   PetscFunctionBegin;
3055   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
3056   ii[0] = 0;
3057   for (i=0; i<M; i++) {
3058     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3059     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3060     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3061     /* remove one from the count if the matrix has a diagonal entry */
3062     for (j=id[i]; j<id[i+1]; j++) {
3063       if (jd[j] == i) {ii[i+1]--;break;}
3064     }
3065   }
3066   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
3067   cnt  = 0;
3068   for (i=0; i<M; i++) {
3069     for (j=io[i]; j<io[i+1]; j++) {
3070       if (garray[jo[j]] > rstart) break;
3071       jj[cnt++] = garray[jo[j]];
3072     }
3073     for (k=id[i]; k<id[i+1]; k++) {
3074       if (jd[k] != i) {
3075         jj[cnt++] = rstart + jd[k];
3076       }
3077     }
3078     for (; j<io[i+1]; j++) {
3079       jj[cnt++] = garray[jo[j]];
3080     }
3081   }
3082   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3083   PetscFunctionReturn(0);
3084 }
3085 
3086 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3087 
3088 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3089 
3090 #undef __FUNCT__
3091 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3092 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3093 {
3094   PetscErrorCode ierr;
3095   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3096   Mat            B;
3097   Mat_MPIAIJ     *b;
3098 
3099   PetscFunctionBegin;
3100   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3101 
3102   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3103   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3104   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3105   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
3106   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3107   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3108   b    = (Mat_MPIAIJ*) B->data;
3109 
3110   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3111   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3112   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3113   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3114   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3115   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3116   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3117   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3118   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3119   if (reuse == MAT_INPLACE_MATRIX) {
3120     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3121   } else {
3122    *newmat = B;
3123   }
3124   PetscFunctionReturn(0);
3125 }
3126 
3127 /*MC
3128    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3129 
3130    Options Database Keys:
3131 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3132 . -mat_block_size <bs> - set the blocksize used to store the matrix
3133 - -mat_use_hash_table <fact> - use a hash table sized by the factor <fact> to save memory during matrix assembly
3134 
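  Example (a minimal sketch; A, M and N are placeholders supplied by the caller):
.vb
   ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
   ierr = MatSetFromOptions(A);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetPreallocation(A,2,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);CHKERRQ(ierr);
.ve
  Run with -mat_type mpibaij -mat_block_size 2 to select this type and blocksize at run time.
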
3135   Level: beginner
3136 
3137 .seealso: MatCreateBAIJ(), MATSEQBAIJ, MATBAIJ
3138 M*/
3139 
3140 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3141 
3142 #undef __FUNCT__
3143 #define __FUNCT__ "MatCreate_MPIBAIJ"
3144 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3145 {
3146   Mat_MPIBAIJ    *b;
3147   PetscErrorCode ierr;
3148   PetscBool      flg = PETSC_FALSE;
3149 
3150   PetscFunctionBegin;
3151   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3152   B->data = (void*)b;
3153 
3154   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3155   B->assembled = PETSC_FALSE;
3156 
3157   B->insertmode = NOT_SET_VALUES;
3158   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3159   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3160 
3161   /* build local table of row and column ownerships */
3162   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3163 
3164   /* build cache for off array entries formed */
3165   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3166 
3167   b->donotstash  = PETSC_FALSE;
3168   b->colmap      = NULL;
3169   b->garray      = NULL;
3170   b->roworiented = PETSC_TRUE;
3171 
3172   /* stuff used in block assembly */
3173   b->barray = 0;
3174 
3175   /* stuff used for matrix vector multiply */
3176   b->lvec  = 0;
3177   b->Mvctx = 0;
3178 
3179   /* stuff for MatGetRow() */
3180   b->rowindices   = 0;
3181   b->rowvalues    = 0;
3182   b->getrowactive = PETSC_FALSE;
3183 
3184   /* hash table stuff */
3185   b->ht           = 0;
3186   b->hd           = 0;
3187   b->ht_size      = 0;
3188   b->ht_flag      = PETSC_FALSE;
3189   b->ht_fact      = 0;
3190   b->ht_total_ct  = 0;
3191   b->ht_insert_ct = 0;
3192 
3193   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3194   b->ijonly = PETSC_FALSE;
3195 
3197   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3198   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3199   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3200   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3201   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3202   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
3203   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3204   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3205   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3206   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3207   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3208 
3209   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPIBAIJ matrix 1","Mat");CHKERRQ(ierr);
3210   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",flg,&flg,NULL);CHKERRQ(ierr);
3211   if (flg) {
3212     PetscReal fact = 1.39;
3213     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3214     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3215     if (fact <= 1.0) fact = 1.39;
3216     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3217     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3218   }
3219   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3220   PetscFunctionReturn(0);
3221 }
3222 
3223 /*MC
3224    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3225 
3226    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3227    and MATMPIBAIJ otherwise.
3228 
3229    Options Database Keys:
3230 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3231 
3232   Level: beginner
3233 
3234 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3235 M*/
3236 
3237 #undef __FUNCT__
3238 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3239 /*@C
3240    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3241    (block compressed row).  For good matrix assembly performance
3242    the user should preallocate the matrix storage by setting the parameters
3243    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3244    performance can be increased by more than a factor of 50.
3245 
3246    Collective on Mat
3247 
3248    Input Parameters:
3249 +  B - the matrix
3250 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3251           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3252 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3253            submatrix  (same for all local rows)
3254 .  d_nnz - array containing the number of block nonzeros in the various block rows
3255            of the diagonal portion of the local submatrix (possibly different for each block
3256            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3257            set it even if it is zero.
3258 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3259            submatrix (same for all local rows).
3260 -  o_nnz - array containing the number of nonzeros in the various block rows of the
3261            off-diagonal portion of the local submatrix (possibly different for
3262            each block row) or NULL.
3263 
3264    If the *_nnz parameter is given then the *_nz parameter is ignored
3265 
3266    Options Database Keys:
3267 +   -mat_block_size - size of the blocks to use
3268 -   -mat_use_hash_table <fact> - use a hash table sized by the factor <fact> to save memory during matrix assembly
3269 
3270    Notes:
3271    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one processor
3272    then it must be used on all processors that share the object for that argument.
3273 
3274    Storage Information:
3275    For a square global matrix we define each processor's diagonal portion
3276    to be its local rows and the corresponding columns (a square submatrix);
3277    each processor's off-diagonal portion encompasses the remainder of the
3278    local matrix (a rectangular submatrix).
3279 
3280    The user can specify preallocated storage for the diagonal part of
3281    the local submatrix with either d_nz or d_nnz (not both).  Set
3282    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3283    memory allocation.  Likewise, specify preallocated storage for the
3284    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3285 
3286    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3287    the figure below we depict these three local rows and all columns (0-11).
3288 
3289 .vb
3290            0 1 2 3 4 5 6 7 8 9 10 11
3291           --------------------------
3292    row 3  |o o o d d d o o o o  o  o
3293    row 4  |o o o d d d o o o o  o  o
3294    row 5  |o o o d d d o o o o  o  o
3295           --------------------------
3296 .ve
3297 
3298    Thus, any entries in the d locations are stored in the d (diagonal)
3299    submatrix, and any entries in the o locations are stored in the
3300    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3301    stored simply in the MATSEQBAIJ format for compressed row storage.
3302 
3303    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3304    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3305    In general, for PDE problems in which most nonzeros are near the diagonal,
3306    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3307    or you will get TERRIBLE performance; see the users' manual chapter on
3308    matrices.
3309 
3310    You can call MatGetInfo() to get information on how effective the preallocation was;
3311    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3312    You can also run with the option -info and look for messages with the string
3313    malloc in them to see if additional memory allocation was needed.
3314 
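   Example (a hedged sketch matching the figure above: with bs=3 the three depicted
   rows form one local block row containing 1 diagonal block and 3 off-diagonal blocks):
.vb
   PetscInt d_nnz[] = {1},o_nnz[] = {3};
   ierr = MatMPIBAIJSetPreallocation(B,3,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
.ve
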
3315    Level: intermediate
3316 
3317 .keywords: matrix, block, aij, compressed row, sparse, parallel
3318 
3319 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3320 @*/
3321 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3322 {
3323   PetscErrorCode ierr;
3324 
3325   PetscFunctionBegin;
3326   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3327   PetscValidType(B,1);
3328   PetscValidLogicalCollectiveInt(B,bs,2);
3329   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3330   PetscFunctionReturn(0);
3331 }
3332 
3333 #undef __FUNCT__
3334 #define __FUNCT__ "MatCreateBAIJ"
3335 /*@C
3336    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3337    (block compressed row).  For good matrix assembly performance
3338    the user should preallocate the matrix storage by setting the parameters
3339    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3340    performance can be increased by more than a factor of 50.
3341 
3342    Collective on MPI_Comm
3343 
3344    Input Parameters:
3345 +  comm - MPI communicator
3346 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3347           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3348 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3349            This value should be the same as the local size used in creating the
3350            y vector for the matrix-vector product y = Ax.
3351 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3352            This value should be the same as the local size used in creating the
3353            x vector for the matrix-vector product y = Ax.
3354 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3355 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3356 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3357            submatrix  (same for all local rows)
3358 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3359            of the diagonal portion of the local submatrix (possibly different for each block
3360            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3361            and set it even if it is zero.
3362 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3363            submatrix (same for all local rows).
3364 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3365            off-diagonal portion of the local submatrix (possibly different for
3366            each block row) or NULL.
3367 
3368    Output Parameter:
3369 .  A - the matrix
3370 
3371    Options Database Keys:
3372 +   -mat_block_size - size of the blocks to use
3373 -   -mat_use_hash_table <fact> - use a hash table sized by the factor <fact> to save memory during matrix assembly
3374 
3375    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3376    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3377    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3378 
3379    Notes:
3380    If the *_nnz parameter is given then the *_nz parameter is ignored
3381 
3382    A nonzero block is any block that has 1 or more nonzeros in it
3383 
3384    The user MUST specify either the local or global matrix dimensions
3385    (possibly both).
3386 
3387    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one processor
3388    then it must be used on all processors that share the object for that argument.
3389 
3390    Storage Information:
3391    For a square global matrix we define each processor's diagonal portion
3392    to be its local rows and the corresponding columns (a square submatrix);
3393    each processor's off-diagonal portion encompasses the remainder of the
3394    local matrix (a rectangular submatrix).
3395 
3396    The user can specify preallocated storage for the diagonal part of
3397    the local submatrix with either d_nz or d_nnz (not both).  Set
3398    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3399    memory allocation.  Likewise, specify preallocated storage for the
3400    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3401 
3402    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3403    the figure below we depict these three local rows and all columns (0-11).
3404 
3405 .vb
3406            0 1 2 3 4 5 6 7 8 9 10 11
3407           --------------------------
3408    row 3  |o o o d d d o o o o  o  o
3409    row 4  |o o o d d d o o o o  o  o
3410    row 5  |o o o d d d o o o o  o  o
3411           --------------------------
3412 .ve
3413 
3414    Thus, any entries in the d locations are stored in the d (diagonal)
3415    submatrix, and any entries in the o locations are stored in the
3416    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3417    stored simply in the MATSEQBAIJ format for compressed row storage.
3418 
3419    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3420    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3421    In general, for PDE problems in which most nonzeros are near the diagonal,
3422    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3423    or you will get TERRIBLE performance; see the users' manual chapter on
3424    matrices.
3425 
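   Example (a minimal sketch; M and N are assumed to be set by the caller and divisible
   by the blocksize 2, and 5 and 2 are illustrative per-block-row estimates):
.vb
   Mat A;
   ierr = MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,M,N,5,NULL,2,NULL,&A);CHKERRQ(ierr);
.ve
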
3426    Level: intermediate
3427 
3428 .keywords: matrix, block, aij, compressed row, sparse, parallel
3429 
3430 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3431 @*/
3432 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3433 {
3434   PetscErrorCode ierr;
3435   PetscMPIInt    size;
3436 
3437   PetscFunctionBegin;
3438   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3439   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3440   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3441   if (size > 1) {
3442     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3443     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3444   } else {
3445     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3446     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3447   }
3448   PetscFunctionReturn(0);
3449 }
3450 
3451 #undef __FUNCT__
3452 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3453 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3454 {
3455   Mat            mat;
3456   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3457   PetscErrorCode ierr;
3458   PetscInt       len=0;
3459 
3460   PetscFunctionBegin;
3461   *newmat = 0;
3462   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3463   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3464   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3465   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3466 
3467   mat->factortype   = matin->factortype;
3468   mat->preallocated = PETSC_TRUE;
3469   mat->assembled    = PETSC_TRUE;
3470   mat->insertmode   = NOT_SET_VALUES;
3471 
3472   a             = (Mat_MPIBAIJ*)mat->data;
3473   mat->rmap->bs = matin->rmap->bs;
3474   a->bs2        = oldmat->bs2;
3475   a->mbs        = oldmat->mbs;
3476   a->nbs        = oldmat->nbs;
3477   a->Mbs        = oldmat->Mbs;
3478   a->Nbs        = oldmat->Nbs;
3479 
3480   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3481   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3482 
3483   a->size         = oldmat->size;
3484   a->rank         = oldmat->rank;
3485   a->donotstash   = oldmat->donotstash;
3486   a->roworiented  = oldmat->roworiented;
3487   a->rowindices   = 0;
3488   a->rowvalues    = 0;
3489   a->getrowactive = PETSC_FALSE;
3490   a->barray       = 0;
3491   a->rstartbs     = oldmat->rstartbs;
3492   a->rendbs       = oldmat->rendbs;
3493   a->cstartbs     = oldmat->cstartbs;
3494   a->cendbs       = oldmat->cendbs;
3495 
3496   /* hash table stuff */
3497   a->ht           = 0;
3498   a->hd           = 0;
3499   a->ht_size      = 0;
3500   a->ht_flag      = oldmat->ht_flag;
3501   a->ht_fact      = oldmat->ht_fact;
3502   a->ht_total_ct  = 0;
3503   a->ht_insert_ct = 0;
3504 
3505   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3506   if (oldmat->colmap) {
3507 #if defined(PETSC_USE_CTABLE)
3508     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3509 #else
3510     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3511     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3512     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3513 #endif
3514   } else a->colmap = 0;
3515 
3516   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3517     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3518     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3519     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3520   } else a->garray = 0;
3521 
3522   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3523   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3524   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3525   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3526   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3527 
3528   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3529   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3530   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3531   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3532   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3533   *newmat = mat;
3534   PetscFunctionReturn(0);
3535 }
3536 
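/*
   MatLoad_MPIBAIJ - loads the matrix from a PETSc binary viewer.  Process 0
   reads the header, row lengths, column indices and numerical values and ships
   each piece to its owning process; when the global size is not divisible by
   the block size, the matrix is padded with extra rows and columns that carry
   a unit diagonal so that the block structure works out.
*/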
3537 #undef __FUNCT__
3538 #define __FUNCT__ "MatLoad_MPIBAIJ"
3539 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3540 {
3541   PetscErrorCode ierr;
3542   int            fd;
3543   PetscInt       i,nz,j,rstart,rend;
3544   PetscScalar    *vals,*buf;
3545   MPI_Comm       comm;
3546   MPI_Status     status;
3547   PetscMPIInt    rank,size,maxnz;
3548   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3549   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3550   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3551   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3552   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3553   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3554 
3555   PetscFunctionBegin;
3556   /* force binary viewer to load .info file if it has not yet done so */
3557   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3558   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3559   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3560   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3561   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3562   if (bs < 0) bs = 1;
3563 
3564   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3565   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3566   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3567   if (!rank) {
3568     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3569     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3569     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not a matrix object");
3570     if (header[3] < 0) SETERRQ(PetscObjectComm((PetscObject)newmat),PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format on disk, cannot load as MPIBAIJ");
3571   }
3572   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3573   M    = header[1]; N = header[2];
3574 
3575   /* If global sizes are set, check if they are consistent with that given in the file */
3576   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows:Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3577   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols:Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3578 
3579   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3580 
3581   /*
3582      This code adds extra rows to make sure the number of rows is
3583      divisible by the blocksize
3584   */
3585   Mbs        = M/bs;
3586   extra_rows = bs - M + bs*Mbs;
3587   if (extra_rows == bs) extra_rows = 0;
3588   else                  Mbs++;
3589   if (extra_rows && !rank) {
3590     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3591   }
3592 
3593   /* determine ownership of all rows */
3594   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3595     mbs = Mbs/size + ((Mbs % size) > rank);
3596     m   = mbs*bs;
3597   } else { /* User set */
3598     m   = newmat->rmap->n;
3599     mbs = m/bs;
3600   }
3601   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3602   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3603 
3604   /* process 0 needs enough room for process with most rows */
3605   if (!rank) {
3606     mmax = rowners[1];
3607     for (i=2; i<=size; i++) {
3608       mmax = PetscMax(mmax,rowners[i]);
3609     }
3610     mmax*=bs;
3611   } else mmax = -1;             /* unused, but compiler warns anyway */
3612 
3613   rowners[0] = 0;
3614   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3615   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3616   rstart = rowners[rank];
3617   rend   = rowners[rank+1];
3618 
3619   /* distribute row lengths to all processors */
3620   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3621   if (!rank) {
3622     mend = m;
3623     if (size == 1) mend = mend - extra_rows;
3624     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3625     for (j=mend; j<m; j++) locrowlens[j] = 1;
3626     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3627     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3628     for (j=0; j<m; j++) {
3629       procsnz[0] += locrowlens[j];
3630     }
3631     for (i=1; i<size; i++) {
3632       mend = browners[i+1] - browners[i];
3633       if (i == size-1) mend = mend - extra_rows;
3634       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3635       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3636       /* calculate the number of nonzeros on each processor */
3637       for (j=0; j<browners[i+1]-browners[i]; j++) {
3638         procsnz[i] += rowlengths[j];
3639       }
3640       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3641     }
3642     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3643   } else {
3644     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3645   }
3646 
3647   if (!rank) {
3648     /* determine max buffer needed and allocate it */
3649     maxnz = procsnz[0];
3650     for (i=1; i<size; i++) {
3651       maxnz = PetscMax(maxnz,procsnz[i]);
3652     }
3653     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3654 
3655     /* read in my part of the matrix column indices  */
3656     nz     = procsnz[0];
3657     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3658     mycols = ibuf;
3659     if (size == 1) nz -= extra_rows;
3660     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3661     if (size == 1) {
3662       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3663     }
3664 
3665     /* read in the column indices of the other processes (except the last) and ship them off */
3666     for (i=1; i<size-1; i++) {
3667       nz   = procsnz[i];
3668       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3669       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3670     }
3671     /* read in the column indices for the last process */
3672     if (size != 1) {
3673       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3674       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3675       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3676       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3677     }
3678     ierr = PetscFree(cols);CHKERRQ(ierr);
3679   } else {
3680     /* determine buffer space needed for message */
3681     nz = 0;
3682     for (i=0; i<m; i++) {
3683       nz += locrowlens[i];
3684     }
3685     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3686     mycols = ibuf;
3687     /* receive message of column indices*/
3688     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3689     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3690     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3691   }
3692 
3693   /* loop over local rows, determining number of off diagonal entries */
3694   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3695   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3696   rowcount = 0; nzcount = 0;
3697   for (i=0; i<mbs; i++) {
3698     dcount  = 0;
3699     odcount = 0;
3700     for (j=0; j<bs; j++) {
3701       kmax = locrowlens[rowcount];
3702       for (k=0; k<kmax; k++) {
3703         tmp = mycols[nzcount++]/bs;
3704         if (!mask[tmp]) {
3705           mask[tmp] = 1;
3706           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3707           else masked1[dcount++] = tmp;
3708         }
3709       }
3710       rowcount++;
3711     }
3712 
3713     dlens[i]  = dcount;
3714     odlens[i] = odcount;
3715 
3716     /* zero out the mask elements we set */
3717     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3718     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3719   }
3720 
3721   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3722   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3723 
3724   if (!rank) {
3725     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3726     /* read in my part of the matrix numerical values  */
3727     nz     = procsnz[0];
3728     vals   = buf;
3729     mycols = ibuf;
3730     if (size == 1) nz -= extra_rows;
3731     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3732     if (size == 1) {
3733       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3734     }
3735 
3736     /* insert into matrix */
3737     jj = rstart*bs;
3738     for (i=0; i<m; i++) {
3739       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3740       mycols += locrowlens[i];
3741       vals   += locrowlens[i];
3742       jj++;
3743     }
3744     /* read in the values of the other processes (except the last one) and ship them out */
3745     for (i=1; i<size-1; i++) {
3746       nz   = procsnz[i];
3747       vals = buf;
3748       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3749       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3750     }
3751     /* the last proc */
3752     if (size != 1) {
3753       nz   = procsnz[i] - extra_rows;
3754       vals = buf;
3755       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3756       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3757       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3758     }
3759     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3760   } else {
3761     /* receive numeric values */
3762     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3763 
3764     /* receive message of values*/
3765     vals   = buf;
3766     mycols = ibuf;
3767     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3768 
3769     /* insert into matrix */
3770     jj = rstart*bs;
3771     for (i=0; i<m; i++) {
3772       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3773       mycols += locrowlens[i];
3774       vals   += locrowlens[i];
3775       jj++;
3776     }
3777   }
3778   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3779   ierr = PetscFree(buf);CHKERRQ(ierr);
3780   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3781   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3782   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3783   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3784   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3785   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3786   PetscFunctionReturn(0);
3787 }
3788 
3789 #undef __FUNCT__
3790 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3791 /*@
3792    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3793 
3794    Input Parameters:
3795 +  mat  - the matrix
3796 -  fact - factor
3797 
3798    Not Collective, each process can use a different factor
3799 
3800    Level: advanced
3801 
3802   Notes:
3803    This can also be set by the command line option: -mat_use_hash_table <fact>
3804 
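   Example (a minimal sketch; 1.39 is the default factor used by this implementation):
.vb
   ierr = MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetHashTableFactor(mat,1.39);CHKERRQ(ierr);
.ve
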
3805 .keywords: matrix, hashtable, factor, HT
3806 
3807 .seealso: MatSetOption()
3808 @*/
3809 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3810 {
3811   PetscErrorCode ierr;
3812 
3813   PetscFunctionBegin;
3814   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3815   PetscFunctionReturn(0);
3816 }
3817 
3818 #undef __FUNCT__
3819 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3820 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3821 {
3822   Mat_MPIBAIJ *baij;
3823 
3824   PetscFunctionBegin;
3825   baij          = (Mat_MPIBAIJ*)mat->data;
3826   baij->ht_fact = fact;
3827   PetscFunctionReturn(0);
3828 }
3829 
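/*
   MatMPIBAIJGetSeqBAIJ - gives access to the diagonal (Ad) and off-diagonal (Ao)
   SeqBAIJ pieces of an MPIBAIJ matrix, plus the map from Ao's local block columns
   to global block columns; any output argument may be NULL if it is not needed.
   A minimal usage sketch (the variable names are illustrative only):

      Mat            Ad,Ao;
      const PetscInt *colmap;
      ierr = MatMPIBAIJGetSeqBAIJ(A,&Ad,&Ao,&colmap);CHKERRQ(ierr);
*/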
3830 #undef __FUNCT__
3831 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3832 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3833 {
3834   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3835 
3836   PetscFunctionBegin;
3837   if (Ad)     *Ad     = a->A;
3838   if (Ao)     *Ao     = a->B;
3839   if (colmap) *colmap = a->garray;
3840   PetscFunctionReturn(0);
3841 }
3842 
3843 /*
3844     Special version for direct calls from Fortran (to eliminate two function call overheads
3845 */
3846 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3847 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3848 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3849 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3850 #endif
3851 
3852 #undef __FUNCT__
3853 #define __FUNCT__ "matmpibaijsetvaluesblocked"
3854 /*@C
3855   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3856 
3857   Collective on Mat
3858 
3859   Input Parameters:
3860 + mat - the matrix
3861 . min - number of input rows
3862 . im - input rows
3863 . nin - number of input columns
3864 . in - input columns
3865 . v - numerical values input
3866 - addvin - INSERT_VALUES or ADD_VALUES
3867 
3868   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3869 
3870   Level: advanced
3871 
3872 .seealso:   MatSetValuesBlocked()
3873 @*/
3874 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3875 {
3876   /* convert input arguments to C version */
3877   Mat        mat  = *matin;
3878   PetscInt   m    = *min, n = *nin;
3879   InsertMode addv = *addvin;
3880 
3881   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3882   const MatScalar *value;
3883   MatScalar       *barray     = baij->barray;
3884   PetscBool       roworiented = baij->roworiented;
3885   PetscErrorCode  ierr;
3886   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3887   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3888   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3889 
3890   PetscFunctionBegin;
3891   /* tasks normally handled by MatSetValuesBlocked() */
3892   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3893 #if defined(PETSC_USE_DEBUG)
3894   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3895   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3896 #endif
3897   if (mat->assembled) {
3898     mat->was_assembled = PETSC_TRUE;
3899     mat->assembled     = PETSC_FALSE;
3900   }
3901   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3902 
3903 
3904   if (!barray) {
3905     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3906     baij->barray = barray;
3907   }
3908 
3909   if (roworiented) stepval = (n-1)*bs; /* gap in v between successive scalar rows of a block */
3910   else stepval = (m-1)*bs;             /* gap in v between successive scalar columns of a block */
3911 
3912   for (i=0; i<m; i++) {
3913     if (im[i] < 0) continue;
3914 #if defined(PETSC_USE_DEBUG)
3915     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3916 #endif
3917     if (im[i] >= rstart && im[i] < rend) {
3918       row = im[i] - rstart;
3919       for (j=0; j<n; j++) {
3920         /* If only one block column (row-oriented) or one block row (column-oriented) is passed, the input block is already contiguous and no copy is required */
3921         if ((roworiented) && (n == 1)) {
3922           barray = (MatScalar*)v + i*bs2;
3923         } else if ((!roworiented) && (m == 1)) {
3924           barray = (MatScalar*)v + j*bs2;
3925         } else { /* Here a copy is required */
3926           if (roworiented) {
3927             value = v + i*(stepval+bs)*bs + j*bs;
3928           } else {
3929             value = v + j*(stepval+bs)*bs + i*bs;
3930           }
3931           for (ii=0; ii<bs; ii++,value+=stepval) {
3932             for (jj=0; jj<bs; jj++) {
3933               *barray++ = *value++;
3934             }
3935           }
3936           barray -= bs2; /* rewind to the start of the just-copied block */
3937         }
3938 
3939         if (in[j] >= cstart && in[j] < cend) {
3940           col  = in[j] - cstart;
3941           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3942         } else if (in[j] < 0) continue;
3943 #if defined(PETSC_USE_DEBUG)
3944         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3945 #endif
3946         else {
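          /* Off-process block column: translate the global block column in[j] into a local
             block column of baij->B via the colmap, creating the colmap on first use */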
3947           if (mat->was_assembled) {
3948             if (!baij->colmap) {
3949               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3950             }
3951 
3952 #if defined(PETSC_USE_DEBUG)
3953 #if defined(PETSC_USE_CTABLE)
3954             { PetscInt data;
3955               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3956               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3957             }
3958 #else
3959             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3960 #endif
3961 #endif
3962 #if defined(PETSC_USE_CTABLE)
3963             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3964             col  = (col - 1)/bs;
3965 #else
3966             col = (baij->colmap[in[j]] - 1)/bs;
3967 #endif
3968             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3969               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3970               col  =  in[j];
3971             }
3972           } else col = in[j];
3973           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3974         }
3975       }
3976     } else {
3977       if (!baij->donotstash) {
3978         if (roworiented) {
3979           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3980         } else {
3981           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3982         }
3983       }
3984     }
3985   }
3986 
3987   /* task normally handled by MatSetValuesBlocked() */
3988   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3989   PetscFunctionReturn(0);
3990 }
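/*
   Illustration (a sketch, not library code) of the value layout the copy loop
   above unpacks.  With bs = 2, one block row and two block columns, row-oriented
   input stores the full 2 x 4 scalar slab row by row, so stepval = (n-1)*bs = 2
   is the gap between the two scalar rows of a single block:

      PetscInt    im[1] = {0}, in[2] = {0,1};
      PetscScalar v[8]  = {1,2, 5,6,
                           3,4, 7,8};

      ierr = MatSetValuesBlocked(A,1,im,2,in,v,INSERT_VALUES);CHKERRQ(ierr);

   This inserts the block [1 2; 3 4] at block column 0 and [5 6; 7 8] at block
   column 1 of block row 0.
*/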
3991 
3992 #undef __FUNCT__
3993 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3994 /*@
3995      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local
3996          rows in standard CSR format.
3997 
3998    Collective on MPI_Comm
3999 
4000    Input Parameters:
4001 +  comm - MPI communicator
4002 .  bs - the block size, only a block size of 1 is supported
4003 .  m - number of local rows (Cannot be PETSC_DECIDE)
4004 .  n - number of local columns; this should be the same as the local size used in creating the
4005        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
4006        calculated if N is given). For square matrices n is almost always m.
4007 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
4008 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
4009 .   i - row indices
4010 .   j - column indices
4011 -   a - matrix values
4012 
4013    Output Parameter:
4014 .   mat - the matrix
4015 
4016    Level: intermediate
4017 
4018    Notes:
4019        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
4020      thus you CANNOT change the matrix entries by changing the values of a[] after you have
4021      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
4022 
4023      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
4024      the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
4025      block, followed by the second column of the first block etc etc.  That is, the blocks are contiguous in memory
4026      with column-major ordering within blocks.
4027 
4028        The i and j indices are 0-based; entry i[row] gives the offset into the local j (and a) arrays at which row 'row' begins.
4029 
4030 .keywords: matrix, baij, block, compressed row, sparse, parallel
4031 
4032 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
4033           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
4034 @*/
4035 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4036 {
4037   PetscErrorCode ierr;
4038 
4039   PetscFunctionBegin;
4040   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4041   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
4042   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4043   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4044   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
4045   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
4046   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4047   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
4048   PetscFunctionReturn(0);
4049 }
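/*
   Usage sketch (hypothetical data, run on a single process for brevity; bs must
   be 1 as noted above).  Two rows of a 2 x 2 matrix are supplied in CSR form:

      PetscInt    i[3] = {0,2,4};              i[row] indexes the start of each row in j and a
      PetscInt    j[4] = {0,1,0,1};            global, 0-based column indices
      PetscScalar a[4] = {2.0,-1.0,-1.0,2.0};
      Mat         A;

      ierr = MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,2,2,PETSC_DETERMINE,PETSC_DETERMINE,i,j,a,&A);CHKERRQ(ierr);

   Because the arrays are copied, i, j, and a may be modified or freed once the
   call returns.
*/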
4050 
4051 #undef __FUNCT__
4052 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPIBAIJ"
4053 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
4054 {
4055   PetscErrorCode ierr;
4056   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
4057   PetscInt       *indx;
4058   PetscScalar    *values;
4059 
4060   PetscFunctionBegin;
4061   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
4062   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
4063     Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
4064     PetscInt       *dnz,*onz,sum,mbs,Nbs;
4065     PetscInt       *bindx,rmax=a->rmax,j;
4066 
4067     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4068     mbs = m/bs; Nbs = N/cbs;
4069     if (n == PETSC_DECIDE) {
4070       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
4071     }
4072     /* Check sum(n) = Nbs */
4073     ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4074     if (sum != Nbs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local columns != global columns %d",Nbs);
4075 
4076     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4077     rstart -= mbs;  /* convert the inclusive scan into this process's first global block row */
4078 
4079     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
4080     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
4081     for (i=0; i<mbs; i++) {
4082       ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
4083       nnz = nnz/bs;
4084       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
4085       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
4086       ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
4087     }
4088     ierr = PetscFree(bindx);CHKERRQ(ierr);
4089 
4090     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
4091     ierr = MatSetSizes(*outmat,m,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4092     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
4093     ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
4094     ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
4095     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
4096   }
4097 
4098   /* numeric phase */
4099   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4100   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
4101 
4102   for (i=0; i<m; i++) {
4103     ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4104     Ii   = i + rstart;
4105     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
4106     ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4107   }
4108   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4109   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4110   PetscFunctionReturn(0);
4111 }
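/*
   Usage sketch: this routine is normally reached through the public
   MatCreateMPIMatConcatenateSeqMat(), which stacks one sequential matrix per
   process into a single parallel matrix.  seqA is assumed to be an assembled
   MATSEQBAIJ matrix on each process.

      Mat B;
      ierr = MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqA,PETSC_DECIDE,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);

   With an unchanged nonzero pattern the numeric phase can be repeated cheaply:

      ierr = MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqA,PETSC_DECIDE,MAT_REUSE_MATRIX,&B);CHKERRQ(ierr);
*/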
4112