
#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
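/*
   A note on the index translation above (an explanatory sketch, not extra API):
   the off-diagonal part B stores compressed block-column indices, and a->garray[]
   maps a local block column of B back to its global block column, so a global
   point-column index is recovered from a local one by

       global_col = bs*garray[local_col/bs] + (local_col % bs);

   e.g. with bs = 2 and garray = {3,7}, local column 3 lies in local block 1,
   garray[1] = 7, and the global column is 2*7 + 3%2 = 15.
*/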

#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix. This is done in a non-scalable way since the
   length of colmap equals the number of block columns in the global matrix.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
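/*
   Sketch of the colmap convention established above (the +1 shift lets a stored
   value of 0 mean "block column not present in the off-diagonal part"):

       PetscInt gbcol = ...;                      global block column to look up
       PetscInt lcol  = baij->colmap[gbcol] - 1;  first local point column of that block, or -1
       if (lcol < 0) ...                          block column absent on this process

   With PETSC_USE_CTABLE the same convention holds, except the lookup goes through
   PetscTableFind() with key gbcol+1 instead of direct array indexing.
*/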

#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
 \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column  (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }
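/*
   A note on the addressing in the two macros above (a description of the existing
   layout, nothing new): block _i of a block row occupies bs2 consecutive MatScalar
   entries starting at ap + bs2*_i, and each bs-by-bs block is stored column-major,
   so the (ridx,cidx) entry of a block sits at offset bs*cidx + ridx.  For bs = 2
   a block is laid out as

       { a00, a10, a01, a11 }    entry (1,0) at offset 1, entry (0,1) at offset 2
*/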

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some Variables required in the macro */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  value = v;
  low = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new global block indexed nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
/*
    This routine should be optimized so that the block copy at ** Here a copy is required ** below is not needed
    by passing additional stride information into the MatSetValuesBlocked_SeqBAIJ_Inlined() routine
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* if there is only one block column (row oriented) or one block row (column oriented) a copy is not required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new blocked indexed nonzero block (%D, %D) into matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
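/*
   Stride bookkeeping in the copy above (a worked example, not additional API):
   for row-oriented input the m*bs by n*bs values in v[] are stored row by row,
   so with stepval = (n-1)*bs the first point row of block (i,j) starts at
   v + (i*(stepval+bs) + j)*bs and successive point rows of that block are
   bs+stepval entries apart.  E.g. m = 1, n = 2, bs = 2 gives stepval = 2;
   block (0,1) starts at v+2 and its second point row at v+6.
*/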

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
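/*
   HASH() is multiplicative hashing with the golden-ratio constant (Knuth):
   h1 = floor(size * frac(key*0.6180339887)).  A small worked example, using
   the same PetscInt truncation as the macro:

       key = 37, size = 100:  37*0.6180339887 = 22.867...,
       fractional part = 0.867..., so h1 = 86.

   Collisions are resolved by the linear probing loops in the *_HT() routines
   below, which scan forward from h1 and wrap around once.
*/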
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up into the Hash Table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);


        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A HASH table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =baij->rendbs,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up into the Hash Table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->cstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}

/*
  Creates and fills the hash table.
  The table is created only once; if new entries need to be added to the
  matrix, the hash table has to be destroyed and recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop Over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average Search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
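/*
   A note on sizing (standard linear-probing analysis, not PETSc-specific): the
   table stores nz keys in ht_size = factor*nz slots, i.e. a load factor of
   alpha = 1/factor.  Knuth's estimate for the average number of probes in a
   successful search is (1 + 1/(1-alpha))/2; for example factor = 1.6 gives
   alpha = 0.625 and about 1.8 probes, which is what the PetscInfo() summary
   above measures empirically.
*/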

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the roworiented flag to column oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode== MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto first processor. */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute lengths of each row  */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. These are sent for every row, not just once per block row, since process 0 does not
     have the information needed to expand a block row into its individual rows. This requires more communication, but still
     no more than the communication needed for the nonzero values */
  nzmax = nz; /* the most space any one processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIBAIJ"
PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
#else
  ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(baij->garray);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
  ierr = PetscFree(baij->barray);CHKERRQ(ierr);
  ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
  ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_MPIBAIJ"
PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       nt;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
  if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
  ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1406   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1407   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1408   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1409   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1410   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1411   PetscFunctionReturn(0);
1412 }
1413 
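/*
   Usage sketch (assumed caller-side code): y = A*x with an MPIBAIJ matrix.  Note
   how MatMult_MPIBAIJ() above overlaps the scatter of the ghost values of xx with
   the purely local product on the diagonal block:

     Vec x,y;
     ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/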
1414 #undef __FUNCT__
1415 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1416 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1417 {
1418   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1419   PetscErrorCode ierr;
1420 
1421   PetscFunctionBegin;
1422   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1423   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1424   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1425   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1426   PetscFunctionReturn(0);
1427 }
1428 
1429 #undef __FUNCT__
1430 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1431 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1432 {
1433   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1434   PetscErrorCode ierr;
1435   PetscBool      merged;
1436 
1437   PetscFunctionBegin;
1438   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1439   /* do nondiagonal part */
1440   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1441   if (!merged) {
1442     /* send it on its way */
1443     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1444     /* do local part */
1445     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1446     /* receive remote parts: note this assumes the values are not actually */
1447     /* inserted in yy until the next line */
1448     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1449   } else {
1450     /* do local part */
1451     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1452     /* send it on its way */
1453     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1454     /* the values were actually received in the Begin(), but the End() must still be called as a no-op */
1455     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1456   }
1457   PetscFunctionReturn(0);
1458 }
1459 
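/*
   Note (added commentary, a sketch of the intent): when the scatter context is
   "merged", VecScatterBegin() already moves the data, so the local transpose
   product must be completed first; in the unmerged case the local product is
   overlapped with communication instead.  A caller simply writes:

     ierr = MatMultTranspose(A,x,y);CHKERRQ(ierr);
*/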
1460 #undef __FUNCT__
1461 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1462 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1463 {
1464   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1465   PetscErrorCode ierr;
1466 
1467   PetscFunctionBegin;
1468   /* do nondiagonal part */
1469   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1470   /* send it on its way */
1471   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1472   /* do local part */
1473   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1474   /* receive remote parts: note this assumes the values are not actually */
1475   /* inserted in zz until the next line, which is true for this implementation */
1476   /* but is perhaps not always true. */
1477   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1478   PetscFunctionReturn(0);
1479 }
1480 
1481 /*
1482   This only works correctly for square matrices where the subblock A->A is the
1483    diagonal block
1484 */
1485 #undef __FUNCT__
1486 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1487 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1488 {
1489   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1490   PetscErrorCode ierr;
1491 
1492   PetscFunctionBegin;
1493   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supported only for square matrices where A->A is the diagonal block");
1494   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1495   PetscFunctionReturn(0);
1496 }
1497 
1498 #undef __FUNCT__
1499 #define __FUNCT__ "MatScale_MPIBAIJ"
1500 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1501 {
1502   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1503   PetscErrorCode ierr;
1504 
1505   PetscFunctionBegin;
1506   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1507   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1508   PetscFunctionReturn(0);
1509 }
1510 
1511 #undef __FUNCT__
1512 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1513 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1514 {
1515   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1516   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1517   PetscErrorCode ierr;
1518   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1519   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1520   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1521 
1522   PetscFunctionBegin;
1523   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1524   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1525   mat->getrowactive = PETSC_TRUE;
1526 
1527   if (!mat->rowvalues && (idx || v)) {
1528     /*
1529         allocate enough space to hold information from the longest row.
1530     */
1531     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1532     PetscInt    max = 1,mbs = mat->mbs,tmp;
1533     for (i=0; i<mbs; i++) {
1534       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1535       if (max < tmp) max = tmp;
1536     }
1537     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1538   }
1539   lrow = row - brstart;
1540 
1541   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1542   if (!v)   {pvA = 0; pvB = 0;}
1543   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1544   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1545   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1546   nztot = nzA + nzB;
1547 
1548   cmap = mat->garray;
1549   if (v  || idx) {
1550     if (nztot) {
1551       /* Sort by increasing column numbers, assuming A and B already sorted */
1552       PetscInt imark = -1;
1553       if (v) {
1554         *v = v_p = mat->rowvalues;
1555         for (i=0; i<nzB; i++) {
1556           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1557           else break;
1558         }
1559         imark = i;
1560         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1561         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1562       }
1563       if (idx) {
1564         *idx = idx_p = mat->rowindices;
1565         if (imark > -1) {
1566           for (i=0; i<imark; i++) {
1567             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1568           }
1569         } else {
1570           for (i=0; i<nzB; i++) {
1571             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1572             else break;
1573           }
1574           imark = i;
1575         }
1576         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1577         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1578       }
1579     } else {
1580       if (idx) *idx = 0;
1581       if (v)   *v   = 0;
1582     }
1583   }
1584   *nz  = nztot;
1585   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1586   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1587   PetscFunctionReturn(0);
1588 }
1589 
1590 #undef __FUNCT__
1591 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1592 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1593 {
1594   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1595 
1596   PetscFunctionBegin;
1597   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1598   baij->getrowactive = PETSC_FALSE;
1599   PetscFunctionReturn(0);
1600 }
1601 
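/*
   Usage sketch (assumed caller-side code): MatGetRow() and MatRestoreRow() must be
   paired, only one row may be active at a time (see getrowactive above), and only
   locally owned rows may be requested:

     PetscInt          row,rstart,rend,ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     ierr = MatGetOwnershipRange(mat,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr = MatGetRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
       ierr = MatRestoreRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     }
*/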
1602 #undef __FUNCT__
1603 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1604 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1605 {
1606   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1607   PetscErrorCode ierr;
1608 
1609   PetscFunctionBegin;
1610   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1611   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 #undef __FUNCT__
1616 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1617 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1618 {
1619   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1620   Mat            A  = a->A,B = a->B;
1621   PetscErrorCode ierr;
1622   PetscReal      isend[5],irecv[5];
1623 
1624   PetscFunctionBegin;
1625   info->block_size = (PetscReal)matin->rmap->bs;
1626 
1627   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1628 
1629   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1630   isend[3] = info->memory;  isend[4] = info->mallocs;
1631 
1632   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1633 
1634   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1635   isend[3] += info->memory;  isend[4] += info->mallocs;
1636 
1637   if (flag == MAT_LOCAL) {
1638     info->nz_used      = isend[0];
1639     info->nz_allocated = isend[1];
1640     info->nz_unneeded  = isend[2];
1641     info->memory       = isend[3];
1642     info->mallocs      = isend[4];
1643   } else if (flag == MAT_GLOBAL_MAX) {
1644     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1645 
1646     info->nz_used      = irecv[0];
1647     info->nz_allocated = irecv[1];
1648     info->nz_unneeded  = irecv[2];
1649     info->memory       = irecv[3];
1650     info->mallocs      = irecv[4];
1651   } else if (flag == MAT_GLOBAL_SUM) {
1652     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1653 
1654     info->nz_used      = irecv[0];
1655     info->nz_allocated = irecv[1];
1656     info->nz_unneeded  = irecv[2];
1657     info->memory       = irecv[3];
1658     info->mallocs      = irecv[4];
1659   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1660   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1661   info->fill_ratio_needed = 0;
1662   info->factor_mallocs    = 0;
1663   PetscFunctionReturn(0);
1664 }
1665 
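/*
   Usage sketch (assumed caller-side code): the per-block counts accumulated above
   can be reduced across all ranks with MAT_GLOBAL_SUM (or MAT_GLOBAL_MAX):

     MatInfo info;
     ierr = MatGetInfo(mat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"global nonzeros used: %g\n",(double)info.nz_used);CHKERRQ(ierr);
*/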
1666 #undef __FUNCT__
1667 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1668 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1669 {
1670   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1671   PetscErrorCode ierr;
1672 
1673   PetscFunctionBegin;
1674   switch (op) {
1675   case MAT_NEW_NONZERO_LOCATIONS:
1676   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1677   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1678   case MAT_KEEP_NONZERO_PATTERN:
1679   case MAT_NEW_NONZERO_LOCATION_ERR:
1680     MatCheckPreallocated(A,1);
1681     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1682     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1683     break;
1684   case MAT_ROW_ORIENTED:
1685     MatCheckPreallocated(A,1);
1686     a->roworiented = flg;
1687 
1688     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1689     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1690     break;
1691   case MAT_NEW_DIAGONALS:
1692     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1693     break;
1694   case MAT_IGNORE_OFF_PROC_ENTRIES:
1695     a->donotstash = flg;
1696     break;
1697   case MAT_USE_HASH_TABLE:
1698     a->ht_flag = flg;
1699     break;
1700   case MAT_SYMMETRIC:
1701   case MAT_STRUCTURALLY_SYMMETRIC:
1702   case MAT_HERMITIAN:
1703   case MAT_SYMMETRY_ETERNAL:
1704     MatCheckPreallocated(A,1);
1705     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1706     break;
1707   default:
1708     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1709   }
1710   PetscFunctionReturn(0);
1711 }
1712 
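/*
   Usage sketch (assumed caller-side code): most options above are forwarded to the
   local diagonal (a->A) and off-diagonal (a->B) blocks; a few only set flags in
   the MPIBAIJ structure itself, e.g.

     ierr = MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
*/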
1713 #undef __FUNCT__
1714 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1715 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1716 {
1717   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1718   Mat_SeqBAIJ    *Aloc;
1719   Mat            B;
1720   PetscErrorCode ierr;
1721   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1722   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1723   MatScalar      *a;
1724 
1725   PetscFunctionBegin;
1726   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1727   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1728     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1729     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1730     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1731     /* Do not know preallocation information, but must set block size */
1732     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1733   } else {
1734     B = *matout;
1735   }
1736 
1737   /* copy over the A part */
1738   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1739   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1740   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1741 
1742   for (i=0; i<mbs; i++) {
1743     rvals[0] = bs*(baij->rstartbs + i);
1744     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1745     for (j=ai[i]; j<ai[i+1]; j++) {
1746       col = (baij->cstartbs+aj[j])*bs;
1747       for (k=0; k<bs; k++) {
1748         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1749 
1750         col++; a += bs;
1751       }
1752     }
1753   }
1754   /* copy over the B part */
1755   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1756   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1757   for (i=0; i<mbs; i++) {
1758     rvals[0] = bs*(baij->rstartbs + i);
1759     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1760     for (j=ai[i]; j<ai[i+1]; j++) {
1761       col = baij->garray[aj[j]]*bs;
1762       for (k=0; k<bs; k++) {
1763         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1764         col++;
1765         a += bs;
1766       }
1767     }
1768   }
1769   ierr = PetscFree(rvals);CHKERRQ(ierr);
1770   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1771   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1772 
1773   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1774   else {
1775     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1776   }
1777   PetscFunctionReturn(0);
1778 }
1779 
1780 #undef __FUNCT__
1781 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1782 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1783 {
1784   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1785   Mat            a     = baij->A,b = baij->B;
1786   PetscErrorCode ierr;
1787   PetscInt       s1,s2,s3;
1788 
1789   PetscFunctionBegin;
1790   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1791   if (rr) {
1792     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1793     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1794     /* Overlap communication with computation. */
1795     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1796   }
1797   if (ll) {
1798     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1799     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1800     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1801   }
1802   /* scale the diagonal block */
1803   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1804 
1805   if (rr) {
1806     /* Do a scatter end and then right scale the off-diagonal block */
1807     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1808     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1809   }
1810   PetscFunctionReturn(0);
1811 }
1812 
1813 #undef __FUNCT__
1814 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1815 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1816 {
1817   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1818   PetscInt      *owners = A->rmap->range;
1819   PetscInt       n      = A->rmap->n;
1820   PetscSF        sf;
1821   PetscInt      *lrows;
1822   PetscSFNode   *rrows;
1823   PetscInt       r, p = 0, len = 0;
1824   PetscErrorCode ierr;
1825 
1826   PetscFunctionBegin;
1827   /* Create SF where leaves are input rows and roots are owned rows */
1828   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1829   for (r = 0; r < n; ++r) lrows[r] = -1;
1830   if (!A->nooffproczerorows) {ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);}
1831   for (r = 0; r < N; ++r) {
1832     const PetscInt idx   = rows[r];
1833     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1834     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1835       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1836     }
1837     if (A->nooffproczerorows) {
1838       if (p != l->rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"MAT_NO_OFF_PROC_ZERO_ROWS set, but row %D is not owned by rank %d",idx,l->rank);
1839       lrows[len++] = idx - owners[p];
1840     } else {
1841       rrows[r].rank = p;
1842       rrows[r].index = rows[r] - owners[p];
1843     }
1844   }
1845   if (!A->nooffproczerorows) {
1846     ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1847     ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1848     /* Collect flags for rows to be zeroed */
1849     ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1850     ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1851     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1852     /* Compress and put in row numbers */
1853     for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1854   }
1855   /* fix right hand side if needed */
1856   if (x && b) {
1857     const PetscScalar *xx;
1858     PetscScalar       *bb;
1859 
1860     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1861     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1862     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1863     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1864     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1865   }
1866 
1867   /* actually zap the local rows */
1868   /*
1869         Zero the required rows. If the "diagonal block" of the matrix
1870      is square and the user wishes to set the diagonal, we use separate
1871      code so that MatSetValues() is not called for each diagonal entry;
1872      such calls would allocate new memory, triggering many mallocs and
1873      slowing things down.
1874   */
1875   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1876   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1877   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
1878     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1879   } else if (diag != 0.0) {
1880     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,0,0);CHKERRQ(ierr);
1881     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1882        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1883     for (r = 0; r < len; ++r) {
1884       const PetscInt row = lrows[r] + A->rmap->rstart;
1885       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1886     }
1887     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1888     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1889   } else {
1890     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1891   }
1892   ierr = PetscFree(lrows);CHKERRQ(ierr);
1893 
1894   /* only change matrix nonzero state if pattern was allowed to be changed */
1895   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1896     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1897     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1898   }
1899   PetscFunctionReturn(0);
1900 }
1901 
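/*
   Usage sketch (assumed caller-side code): zero a set of global rows, place diag
   on the diagonal, and fix the right-hand side b for a known solution x.  The
   PetscSF built above routes each request to the owning rank, so the listed rows
   need not be locally owned unless MAT_NO_OFF_PROC_ZERO_ROWS is set:

     PetscInt rows[] = {0,5};
     ierr = MatZeroRows(A,2,rows,1.0,x,b);CHKERRQ(ierr);
*/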
1902 #undef __FUNCT__
1903 #define __FUNCT__ "MatZeroRowsColumns_MPIBAIJ"
1904 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1905 {
1906   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1907   PetscErrorCode    ierr;
1908   PetscMPIInt       n = A->rmap->n;
1909   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1910   PetscInt          *lrows,*owners = A->rmap->range;
1911   PetscSFNode       *rrows;
1912   PetscSF           sf;
1913   const PetscScalar *xx;
1914   PetscScalar       *bb,*mask;
1915   Vec               xmask,lmask;
1916   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1917   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1918   PetscScalar       *aa;
1919 
1920   PetscFunctionBegin;
1921   /* Create SF where leaves are input rows and roots are owned rows */
1922   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1923   for (r = 0; r < n; ++r) lrows[r] = -1;
1924   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1925   for (r = 0; r < N; ++r) {
1926     const PetscInt idx   = rows[r];
1927     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1928     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1929       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1930     }
1931     rrows[r].rank  = p;
1932     rrows[r].index = rows[r] - owners[p];
1933   }
1934   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1935   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1936   /* Collect flags for rows to be zeroed */
1937   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1938   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1939   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1940   /* Compress and put in row numbers */
1941   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1942   /* zero diagonal part of matrix */
1943   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1944   /* handle off diagonal part of matrix */
1945   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1946   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1947   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1948   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1949   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1950   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1951   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1952   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1953   if (x) {
1954     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1955     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1956     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1957     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1958   }
1959   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1960   /* remove zeroed rows of off diagonal matrix */
1961   for (i = 0; i < len; ++i) {
1962     row   = lrows[i];
1963     count = (baij->i[row/bs +1] - baij->i[row/bs])*bs;
1964     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1965     for (k = 0; k < count; ++k) {
1966       aa[0] = 0.0;
1967       aa   += bs;
1968     }
1969   }
1970   /* loop over all elements of off process part of matrix zeroing removed columns*/
1971   for (i = 0; i < l->B->rmap->N; ++i) {
1972     row = i/bs;
1973     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1974       for (k = 0; k < bs; ++k) {
1975         col = bs*baij->j[j] + k;
1976         if (PetscAbsScalar(mask[col])) {
1977           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1978           if (x) bb[i] -= aa[0]*xx[col];
1979           aa[0] = 0.0;
1980         }
1981       }
1982     }
1983   }
1984   if (x) {
1985     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1986     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1987   }
1988   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1989   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1990   ierr = PetscFree(lrows);CHKERRQ(ierr);
1991 
1992   /* only change matrix nonzero state if pattern was allowed to be changed */
1993   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1994     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1995     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1996   }
1997   PetscFunctionReturn(0);
1998 }
1999 
2000 #undef __FUNCT__
2001 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
2002 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
2003 {
2004   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2005   PetscErrorCode ierr;
2006 
2007   PetscFunctionBegin;
2008   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
2009   PetscFunctionReturn(0);
2010 }
2011 
2012 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
2013 
2014 #undef __FUNCT__
2015 #define __FUNCT__ "MatEqual_MPIBAIJ"
2016 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
2017 {
2018   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
2019   Mat            a,b,c,d;
2020   PetscBool      flg;
2021   PetscErrorCode ierr;
2022 
2023   PetscFunctionBegin;
2024   a = matA->A; b = matA->B;
2025   c = matB->A; d = matB->B;
2026 
2027   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2028   if (flg) {
2029     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2030   }
2031   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2032   PetscFunctionReturn(0);
2033 }
2034 
2035 #undef __FUNCT__
2036 #define __FUNCT__ "MatCopy_MPIBAIJ"
2037 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
2038 {
2039   PetscErrorCode ierr;
2040   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2041   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2042 
2043   PetscFunctionBegin;
2044   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
2045   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2046     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2047   } else {
2048     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2049     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2050   }
2051   PetscFunctionReturn(0);
2052 }
2053 
2054 #undef __FUNCT__
2055 #define __FUNCT__ "MatSetUp_MPIBAIJ"
2056 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
2057 {
2058   PetscErrorCode ierr;
2059 
2060   PetscFunctionBegin;
2061   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
2062   PetscFunctionReturn(0);
2063 }
2064 
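/*
   Usage sketch (assumed caller-side code): MatSetUp_MPIBAIJ() above falls back to
   a default preallocation; supplying explicit per-block-row counts is normally
   much faster.  Here M, N, and bs are assumed problem-dependent values:

     Mat A;
     ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
     ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
     ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
     ierr = MatMPIBAIJSetPreallocation(A,bs,5,NULL,2,NULL);CHKERRQ(ierr);
*/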
2065 #undef __FUNCT__
2066 #define __FUNCT__ "MatAXPYGetPreallocation_MPIBAIJ"
2067 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
2068 {
2069   PetscErrorCode ierr;
2070   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
2071   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
2072   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
2073 
2074   PetscFunctionBegin;
2075   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
2076   PetscFunctionReturn(0);
2077 }
2078 
2079 #undef __FUNCT__
2080 #define __FUNCT__ "MatAXPY_MPIBAIJ"
2081 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2082 {
2083   PetscErrorCode ierr;
2084   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
2085   PetscBLASInt   bnz,one=1;
2086   Mat_SeqBAIJ    *x,*y;
2087 
2088   PetscFunctionBegin;
2089   if (str == SAME_NONZERO_PATTERN) {
2090     PetscScalar alpha = a;
2091     x    = (Mat_SeqBAIJ*)xx->A->data;
2092     y    = (Mat_SeqBAIJ*)yy->A->data;
2093     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2094     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2095     x    = (Mat_SeqBAIJ*)xx->B->data;
2096     y    = (Mat_SeqBAIJ*)yy->B->data;
2097     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2098     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2099     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2100   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2101     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2102   } else {
2103     Mat      B;
2104     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2105     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2106     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2107     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2108     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2109     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2110     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2111     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2112     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2113     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2114     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2115     /* MatAXPY_BasicWithPreallocation() for a BAIJ matrix is much slower than for AIJ, even for bs=1! */
2116     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2117     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2118     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2119     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2120   }
2121   PetscFunctionReturn(0);
2122 }
2123 
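/*
   Usage sketch (assumed caller-side code): Y = Y + a*X.  With SAME_NONZERO_PATTERN
   the routine above reduces to two BLAS axpy calls on the raw value arrays; with
   other patterns it falls back to slower, more general paths:

     ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
*/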
2124 #undef __FUNCT__
2125 #define __FUNCT__ "MatRealPart_MPIBAIJ"
2126 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2127 {
2128   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2129   PetscErrorCode ierr;
2130 
2131   PetscFunctionBegin;
2132   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2133   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2134   PetscFunctionReturn(0);
2135 }
2136 
2137 #undef __FUNCT__
2138 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2139 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2140 {
2141   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2142   PetscErrorCode ierr;
2143 
2144   PetscFunctionBegin;
2145   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2146   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2147   PetscFunctionReturn(0);
2148 }
2149 
2150 #undef __FUNCT__
2151 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2152 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2153 {
2154   PetscErrorCode ierr;
2155   IS             iscol_local;
2156   PetscInt       csize;
2157 
2158   PetscFunctionBegin;
2159   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2160   if (call == MAT_REUSE_MATRIX) {
2161     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2162     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2163   } else {
2164     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2165   }
2166   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2167   if (call == MAT_INITIAL_MATRIX) {
2168     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2169     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2170   }
2171   PetscFunctionReturn(0);
2172 }
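/*
   Usage sketch (assumed caller-side code): the "ISAllGather" object composed above
   is what enables the MAT_REUSE_MATRIX path on a later call with the same index
   sets:

     Mat sub;
     ierr = MatGetSubMatrix(mat,isrow,iscol,MAT_INITIAL_MATRIX,&sub);CHKERRQ(ierr);
     ierr = MatGetSubMatrix(mat,isrow,iscol,MAT_REUSE_MATRIX,&sub);CHKERRQ(ierr);
*/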
2173 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2174 #undef __FUNCT__
2175 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2176 /*
2177   Not great since it makes two copies of the submatrix: first a local SeqBAIJ
2178   matrix, and then the end result obtained by concatenating the local matrices.
2179   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2180   This routine is used for both BAIJ and SBAIJ matrices (an unfortunate dependency).
2181 */
2182 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2183 {
2184   PetscErrorCode ierr;
2185   PetscMPIInt    rank,size;
2186   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2187   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2188   Mat            M,Mreuse;
2189   MatScalar      *vwork,*aa;
2190   MPI_Comm       comm;
2191   IS             isrow_new, iscol_new;
2192   PetscBool      idflag,allrows, allcols;
2193   Mat_SeqBAIJ    *aij;
2194 
2195   PetscFunctionBegin;
2196   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2197   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2198   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2199   /* The compression and expansion should be avoided. It does not report
2200      errors and might change the indices, hence it is buggy */
2201   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2202   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2203 
2204   /* Check for special case: each processor gets entire matrix columns */
2205   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2206   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2207   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2208   else allcols = PETSC_FALSE;
2209 
2210   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2211   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2212   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2213   else allrows = PETSC_FALSE;
2214 
2215   if (call ==  MAT_REUSE_MATRIX) {
2216     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2217     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2218     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2219   } else {
2220     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2221   }
2222   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2223   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2224   /*
2225       m - number of local rows
2226       n - number of columns (same on all processors)
2227       rstart - first row in new global matrix generated
2228   */
2229   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2230   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2231   m    = m/bs;
2232   n    = n/bs;
2233 
2234   if (call == MAT_INITIAL_MATRIX) {
2235     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2236     ii  = aij->i;
2237     jj  = aij->j;
2238 
2239     /*
2240         Determine the number of non-zeros in the diagonal and off-diagonal
2241         portions of the matrix in order to do correct preallocation
2242     */
2243 
2244     /* first get start and end of "diagonal" columns */
2245     if (csize == PETSC_DECIDE) {
2246       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2247       if (mglobal == n*bs) { /* square matrix */
2248         nlocal = m;
2249       } else {
2250         nlocal = n/size + ((n % size) > rank);
2251       }
2252     } else {
2253       nlocal = csize/bs;
2254     }
2255     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2256     rstart = rend - nlocal;
2257     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2258 
2259     /* next, compute all the lengths */
2260     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2261     for (i=0; i<m; i++) {
2262       jend = ii[i+1] - ii[i];
2263       olen = 0;
2264       dlen = 0;
2265       for (j=0; j<jend; j++) {
2266         if (*jj < rstart || *jj >= rend) olen++;
2267         else dlen++;
2268         jj++;
2269       }
2270       olens[i] = olen;
2271       dlens[i] = dlen;
2272     }
2273     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2274     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2275     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2276     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2277     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2278     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2279   } else {
2280     PetscInt ml,nl;
2281 
2282     M    = *newmat;
2283     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2284     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2285     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2286     /*
2287          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2288        rather than the slower MatSetValues().
2289     */
2290     M->was_assembled = PETSC_TRUE;
2291     M->assembled     = PETSC_FALSE;
2292   }
2293   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2294   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2295   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2296   ii   = aij->i;
2297   jj   = aij->j;
2298   aa   = aij->a;
2299   for (i=0; i<m; i++) {
2300     row   = rstart/bs + i;
2301     nz    = ii[i+1] - ii[i];
2302     cwork = jj;     jj += nz;
2303     vwork = aa;     aa += nz*bs*bs;
2304     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2305   }
2306 
2307   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2308   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2309   *newmat = M;
2310 
2311   /* save submatrix used in processor for next request */
2312   if (call ==  MAT_INITIAL_MATRIX) {
2313     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2314     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2315   }
2316   PetscFunctionReturn(0);
2317 }
2318 
2319 #undef __FUNCT__
2320 #define __FUNCT__ "MatPermute_MPIBAIJ"
2321 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2322 {
2323   MPI_Comm       comm,pcomm;
2324   PetscInt       clocal_size,nrows;
2325   const PetscInt *rows;
2326   PetscMPIInt    size;
2327   IS             crowp,lcolp;
2328   PetscErrorCode ierr;
2329 
2330   PetscFunctionBegin;
2331   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2332   /* make a collective version of 'rowp' */
2333   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2334   if (pcomm==comm) {
2335     crowp = rowp;
2336   } else {
2337     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2338     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2339     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2340     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2341   }
2342   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2343   /* make a local version of 'colp' */
2344   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2345   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2346   if (size==1) {
2347     lcolp = colp;
2348   } else {
2349     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2350   }
2351   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2352   /* now we just get the submatrix */
2353   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2354   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2355   /* clean up */
2356   if (pcomm!=comm) {
2357     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2358   }
2359   if (size>1) {
2360     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2361   }
2362   PetscFunctionReturn(0);
2363 }
2364 
2365 #undef __FUNCT__
2366 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2367 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2368 {
2369   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2370   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2371 
2372   PetscFunctionBegin;
2373   if (nghosts) *nghosts = B->nbs;
2374   if (ghosts) *ghosts = baij->garray;
2375   PetscFunctionReturn(0);
2376 }
2377 
2378 #undef __FUNCT__
2379 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2380 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2381 {
2382   Mat            B;
2383   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2384   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2385   Mat_SeqAIJ     *b;
2386   PetscErrorCode ierr;
2387   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2388   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2389   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2390 
2391   PetscFunctionBegin;
2392   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2393   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2394 
2395   /* ----------------------------------------------------------------
2396      Tell every processor the number of nonzeros per row
2397   */
2398   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2399   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2400     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2401   }
2402   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2403   displs    = recvcounts + size;
2404   for (i=0; i<size; i++) {
2405     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2406     displs[i]     = A->rmap->range[i]/bs;
2407   }
2408 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2409   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2410 #else
2411   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2412   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2413 #endif
2414   /* ---------------------------------------------------------------
2415      Create the sequential matrix of the same type as the local block diagonal
2416   */
2417   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2418   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2419   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2420   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2421   b    = (Mat_SeqAIJ*)B->data;
2422 
2423   /*--------------------------------------------------------------------
2424     Copy my part of matrix column indices over
2425   */
2426   sendcount  = ad->nz + bd->nz;
2427   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2428   a_jsendbuf = ad->j;
2429   b_jsendbuf = bd->j;
2430   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2431   cnt        = 0;
2432   for (i=0; i<n; i++) {
2433 
2434     /* put in lower diagonal portion */
2435     m = bd->i[i+1] - bd->i[i];
2436     while (m > 0) {
2437       /* is it above diagonal (in bd (compressed) numbering) */
2438       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2439       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2440       m--;
2441     }
2442 
2443     /* put in diagonal portion */
2444     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2445       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2446     }
2447 
2448     /* put in upper diagonal portion */
2449     while (m-- > 0) {
2450       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2451     }
2452   }
2453   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2454 
2455   /*--------------------------------------------------------------------
2456     Gather all column indices to all processors
2457   */
2458   for (i=0; i<size; i++) {
2459     recvcounts[i] = 0;
2460     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2461       recvcounts[i] += lens[j];
2462     }
2463   }
2464   displs[0] = 0;
2465   for (i=1; i<size; i++) {
2466     displs[i] = displs[i-1] + recvcounts[i-1];
2467   }
2468 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2469   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2470 #else
2471   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2472 #endif
2473   /*--------------------------------------------------------------------
2474     Assemble the matrix into usable form (note the numerical values are not yet set)
2475   */
2476   /* set the b->ilen (length of each row) values */
2477   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2478   /* set the b->i indices */
2479   b->i[0] = 0;
2480   for (i=1; i<=A->rmap->N/bs; i++) {
2481     b->i[i] = b->i[i-1] + lens[i-1];
2482   }
2483   ierr = PetscFree(lens);CHKERRQ(ierr);
2484   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2485   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2486   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2487 
2488   if (A->symmetric) {
2489     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2490   } else if (A->hermitian) {
2491     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2492   } else if (A->structurally_symmetric) {
2493     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2494   }
2495   *newmat = B;
2496   PetscFunctionReturn(0);
2497 }
2498 
2499 #undef __FUNCT__
2500 #define __FUNCT__ "MatSOR_MPIBAIJ"
2501 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2502 {
2503   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2504   PetscErrorCode ierr;
2505   Vec            bb1 = 0;
2506 
2507   PetscFunctionBegin;
2508   if (flag == SOR_APPLY_UPPER) {
2509     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2510     PetscFunctionReturn(0);
2511   }
2512 
2513   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) {
2514     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2515   }
2516 
2517   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2518     if (flag & SOR_ZERO_INITIAL_GUESS) {
2519       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2520       its--;
2521     }
2522 
2523     while (its--) {
2524       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2525       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2526 
2527       /* update rhs: bb1 = bb - B*x */
2528       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2529       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2530 
2531       /* local sweep */
2532       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2533     }
2534   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2535     if (flag & SOR_ZERO_INITIAL_GUESS) {
2536       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2537       its--;
2538     }
2539     while (its--) {
2540       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2541       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2542 
2543       /* update rhs: bb1 = bb - B*x */
2544       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2545       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2546 
2547       /* local sweep */
2548       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2549     }
2550   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2551     if (flag & SOR_ZERO_INITIAL_GUESS) {
2552       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2553       its--;
2554     }
2555     while (its--) {
2556       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2557       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2558 
2559       /* update rhs: bb1 = bb - B*x */
2560       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2561       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2562 
2563       /* local sweep */
2564       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2565     }
2566   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Parallel SOR variant requested is not supported");
2567 
2568   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2569   PetscFunctionReturn(0);
2570 }
2571 
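/*
   Note (added commentary): each "local" sweep above splits the matrix into its
   diagonal block Ad and off-diagonal block Ao and iterates on bb1 = bb - Ao*x, so
   the sequential SOR kernel only ever sees Ad.  A direct call looks like:

     ierr = MatSOR(A,b,1.0,SOR_LOCAL_SYMMETRIC_SWEEP,0.0,1,1,x);CHKERRQ(ierr);
*/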
2572 #undef __FUNCT__
2573 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2574 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2575 {
2576   PetscErrorCode ierr;
2577   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2578   PetscInt       N,i,*garray = aij->garray;
2579   PetscInt       ib,jb,bs = A->rmap->bs;
2580   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2581   MatScalar      *a_val = a_aij->a;
2582   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2583   MatScalar      *b_val = b_aij->a;
2584   PetscReal      *work;
2585 
2586   PetscFunctionBegin;
2587   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2588   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2589   if (type == NORM_2) {
2590     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2591       for (jb=0; jb<bs; jb++) {
2592         for (ib=0; ib<bs; ib++) {
2593           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2594           a_val++;
2595         }
2596       }
2597     }
2598     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2599       for (jb=0; jb<bs; jb++) {
2600         for (ib=0; ib<bs; ib++) {
2601           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2602           b_val++;
2603         }
2604       }
2605     }
2606   } else if (type == NORM_1) {
2607     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2608       for (jb=0; jb<bs; jb++) {
2609         for (ib=0; ib<bs; ib++) {
2610           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2611           a_val++;
2612         }
2613       }
2614     }
2615     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2616       for (jb=0; jb<bs; jb++) {
2617         for (ib=0; ib<bs; ib++) {
2618           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2619           b_val++;
2620         }
2621       }
2622     }
2623   } else if (type == NORM_INFINITY) {
2624     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2625       for (jb=0; jb<bs; jb++) {
2626         for (ib=0; ib<bs; ib++) {
2627           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2628           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2629           a_val++;
2630         }
2631       }
2632     }
2633     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2634       for (jb=0; jb<bs; jb++) {
2635         for (ib=0; ib<bs; ib++) {
2636           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2637           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2638           b_val++;
2639         }
2640       }
2641     }
2642   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2643   if (type == NORM_INFINITY) {
2644     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2645   } else {
2646     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2647   }
2648   ierr = PetscFree(work);CHKERRQ(ierr);
2649   if (type == NORM_2) {
2650     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2651   }
2652   PetscFunctionReturn(0);
2653 }
2654 
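/*
   Usage sketch (assumed caller-side code): every rank receives all N global column
   norms, since the partial results are combined with an Allreduce above:

     PetscReal *norms;
     PetscInt  N;
     ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
     ierr = PetscMalloc1(N,&norms);CHKERRQ(ierr);
     ierr = MatGetColumnNorms(A,NORM_2,norms);CHKERRQ(ierr);
     ierr = PetscFree(norms);CHKERRQ(ierr);
*/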
2655 #undef __FUNCT__
2656 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2657 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2658 {
2659   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2660   PetscErrorCode ierr;
2661 
2662   PetscFunctionBegin;
2663   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2664   A->errortype = a->A->errortype;
2665   PetscFunctionReturn(0);
2666 }
2667 
2668 #undef __FUNCT__
2669 #define __FUNCT__ "MatShift_MPIBAIJ"
2670 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2671 {
2672   PetscErrorCode ierr;
2673   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2674   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2675 
2676   PetscFunctionBegin;
2677   if (!Y->preallocated) {
2678     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2679   } else if (!aij->nz) {
2680     PetscInt nonew = aij->nonew;
2681     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2682     aij->nonew = nonew;
2683   }
2684   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2685   PetscFunctionReturn(0);
2686 }
2687 
2688 #undef __FUNCT__
2689 #define __FUNCT__ "MatMissingDiagonal_MPIBAIJ"
2690 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2691 {
2692   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2693   PetscErrorCode ierr;
2694 
2695   PetscFunctionBegin;
2696   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2697   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2698   if (d) {
2699     PetscInt rstart;
2700     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2701     *d += rstart/A->rmap->bs;
2702 
2703   }
2704   PetscFunctionReturn(0);
2705 }
2706 
2707 /* -------------------------------------------------------------------*/
2708 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2709                                        MatGetRow_MPIBAIJ,
2710                                        MatRestoreRow_MPIBAIJ,
2711                                        MatMult_MPIBAIJ,
2712                                 /* 4*/ MatMultAdd_MPIBAIJ,
2713                                        MatMultTranspose_MPIBAIJ,
2714                                        MatMultTransposeAdd_MPIBAIJ,
2715                                        0,
2716                                        0,
2717                                        0,
2718                                 /*10*/ 0,
2719                                        0,
2720                                        0,
2721                                        MatSOR_MPIBAIJ,
2722                                        MatTranspose_MPIBAIJ,
2723                                 /*15*/ MatGetInfo_MPIBAIJ,
2724                                        MatEqual_MPIBAIJ,
2725                                        MatGetDiagonal_MPIBAIJ,
2726                                        MatDiagonalScale_MPIBAIJ,
2727                                        MatNorm_MPIBAIJ,
2728                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2729                                        MatAssemblyEnd_MPIBAIJ,
2730                                        MatSetOption_MPIBAIJ,
2731                                        MatZeroEntries_MPIBAIJ,
2732                                 /*24*/ MatZeroRows_MPIBAIJ,
2733                                        0,
2734                                        0,
2735                                        0,
2736                                        0,
2737                                 /*29*/ MatSetUp_MPIBAIJ,
2738                                        0,
2739                                        0,
2740                                        0,
2741                                        0,
2742                                 /*34*/ MatDuplicate_MPIBAIJ,
2743                                        0,
2744                                        0,
2745                                        0,
2746                                        0,
2747                                 /*39*/ MatAXPY_MPIBAIJ,
2748                                        MatGetSubMatrices_MPIBAIJ,
2749                                        MatIncreaseOverlap_MPIBAIJ,
2750                                        MatGetValues_MPIBAIJ,
2751                                        MatCopy_MPIBAIJ,
2752                                 /*44*/ 0,
2753                                        MatScale_MPIBAIJ,
2754                                        MatShift_MPIBAIJ,
2755                                        0,
2756                                        MatZeroRowsColumns_MPIBAIJ,
2757                                 /*49*/ 0,
2758                                        0,
2759                                        0,
2760                                        0,
2761                                        0,
2762                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2763                                        0,
2764                                        MatSetUnfactored_MPIBAIJ,
2765                                        MatPermute_MPIBAIJ,
2766                                        MatSetValuesBlocked_MPIBAIJ,
2767                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2768                                        MatDestroy_MPIBAIJ,
2769                                        MatView_MPIBAIJ,
2770                                        0,
2771                                        0,
2772                                 /*64*/ 0,
2773                                        0,
2774                                        0,
2775                                        0,
2776                                        0,
2777                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2778                                        0,
2779                                        0,
2780                                        0,
2781                                        0,
2782                                 /*74*/ 0,
2783                                        MatFDColoringApply_BAIJ,
2784                                        0,
2785                                        0,
2786                                        0,
2787                                 /*79*/ 0,
2788                                        0,
2789                                        0,
2790                                        0,
2791                                        MatLoad_MPIBAIJ,
2792                                 /*84*/ 0,
2793                                        0,
2794                                        0,
2795                                        0,
2796                                        0,
2797                                 /*89*/ 0,
2798                                        0,
2799                                        0,
2800                                        0,
2801                                        0,
2802                                 /*94*/ 0,
2803                                        0,
2804                                        0,
2805                                        0,
2806                                        0,
2807                                 /*99*/ 0,
2808                                        0,
2809                                        0,
2810                                        0,
2811                                        0,
2812                                 /*104*/0,
2813                                        MatRealPart_MPIBAIJ,
2814                                        MatImaginaryPart_MPIBAIJ,
2815                                        0,
2816                                        0,
2817                                 /*109*/0,
2818                                        0,
2819                                        0,
2820                                        0,
2821                                        MatMissingDiagonal_MPIBAIJ,
2822                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2823                                        0,
2824                                        MatGetGhosts_MPIBAIJ,
2825                                        0,
2826                                        0,
2827                                 /*119*/0,
2828                                        0,
2829                                        0,
2830                                        0,
2831                                        MatGetMultiProcBlock_MPIBAIJ,
2832                                 /*124*/0,
2833                                        MatGetColumnNorms_MPIBAIJ,
2834                                        MatInvertBlockDiagonal_MPIBAIJ,
2835                                        0,
2836                                        0,
2837                                /*129*/ 0,
2838                                        0,
2839                                        0,
2840                                        0,
2841                                        0,
2842                                /*134*/ 0,
2843                                        0,
2844                                        0,
2845                                        0,
2846                                        0,
2847                                /*139*/ 0,
2848                                        0,
2849                                        0,
2850                                        MatFDColoringSetUp_MPIXAIJ,
2851                                        0,
2852                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2853 };
2854 
2855 #undef __FUNCT__
2856 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2857 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2858 {
2859   PetscFunctionBegin;
2860   *a = ((Mat_MPIBAIJ*)A->data)->A;
2861   PetscFunctionReturn(0);
2862 }
2863 
2864 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2865 
2866 #undef __FUNCT__
2867 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2868 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2869 {
2870   PetscInt       m,rstart,cstart,cend;
2871   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2872   const PetscInt *JJ    =0;
2873   PetscScalar    *values=0;
2874   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2875   PetscErrorCode ierr;
2876 
2877   PetscFunctionBegin;
2878   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2879   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2880   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2881   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2882   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2883   m      = B->rmap->n/bs;
2884   rstart = B->rmap->rstart/bs;
2885   cstart = B->cmap->rstart/bs;
2886   cend   = B->cmap->rend/bs;
2887 
2888   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2889   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
2890   for (i=0; i<m; i++) {
2891     nz = ii[i+1] - ii[i];
2892     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2893     nz_max = PetscMax(nz_max,nz);
2894     JJ     = jj + ii[i];
2895     for (j=0; j<nz; j++) {
2896       if (*JJ >= cstart) break;
2897       JJ++;
2898     }
2899     d = 0;
2900     for (; j<nz; j++) {
2901       if (*JJ++ >= cend) break;
2902       d++;
2903     }
2904     d_nnz[i] = d;
2905     o_nnz[i] = nz - d;
2906   }
2907   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2908   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2909 
2910   values = (PetscScalar*)V;
2911   if (!values) {
2912     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2913     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2914   }
2915   for (i=0; i<m; i++) {
2916     PetscInt          row    = i + rstart;
2917     PetscInt          ncols  = ii[i+1] - ii[i];
2918     const PetscInt    *icols = jj + ii[i];
2919     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2920       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2921       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2922     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2923       PetscInt j;
2924       for (j=0; j<ncols; j++) {
2925         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2926         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2927       }
2928     }
2929   }
2930 
2931   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2932   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2933   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2934   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2935   PetscFunctionReturn(0);
2936 }
2937 
2938 #undef __FUNCT__
2939 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2940 /*@C
2941    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2942    (block compressed row).
2943 
2944    Collective on MPI_Comm
2945 
2946    Input Parameters:
2947 +  B - the matrix
2948 .  bs - the block size
2949 .  i - the indices into j for the start of each local row (starts with zero)
2950 .  j - the column indices for each local row (starts with zero); these must be sorted for each row
2951 -  v - optional values in the matrix
2952 
2953    Level: developer
2954 
2955    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2956    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
2957    over rows within a block and the last index is over columns within a block row.  Fortran programs will likely set
2958    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
2959    block column and the second index is over columns within a block.
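
   As an illustrative sketch (the sizes and values are invented for the
   example), a process owning two block rows with bs=2, where block row 0 has
   blocks in block columns 0 and 1 and block row 1 has a single block in
   block column 1, would pass

.vb
   PetscInt    ii[3] = {0,2,3};
   PetscInt    jj[3] = {0,1,1};
   PetscScalar v[12];
   ierr = MatMPIBAIJSetPreallocationCSR(B,2,ii,jj,v);CHKERRQ(ierr);
.ve

   where v holds one bs*bs block of values per block nonzero (3*2*2 = 12
   entries), ordered as described above.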
2960 
2961 .keywords: matrix, aij, compressed row, sparse, parallel
2962 
2963 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateBAIJ(), MATMPIAIJ, MatCreateMPIBAIJWithArrays(), MATMPIBAIJ
2964 @*/
2965 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2966 {
2967   PetscErrorCode ierr;
2968 
2969   PetscFunctionBegin;
2970   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2971   PetscValidType(B,1);
2972   PetscValidLogicalCollectiveInt(B,bs,2);
2973   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2974   PetscFunctionReturn(0);
2975 }
2976 
2977 #undef __FUNCT__
2978 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2979 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2980 {
2981   Mat_MPIBAIJ    *b;
2982   PetscErrorCode ierr;
2983   PetscInt       i;
2984 
2985   PetscFunctionBegin;
2986   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2987   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2988   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2989   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2990 
2991   if (d_nnz) {
2992     for (i=0; i<B->rmap->n/bs; i++) {
2993       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
2994     }
2995   }
2996   if (o_nnz) {
2997     for (i=0; i<B->rmap->n/bs; i++) {
2998       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
2999     }
3000   }
3001 
3002   b      = (Mat_MPIBAIJ*)B->data;
3003   b->bs2 = bs*bs;
3004   b->mbs = B->rmap->n/bs;
3005   b->nbs = B->cmap->n/bs;
3006   b->Mbs = B->rmap->N/bs;
3007   b->Nbs = B->cmap->N/bs;
3008 
3009   for (i=0; i<=b->size; i++) {
3010     b->rangebs[i] = B->rmap->range[i]/bs;
3011   }
3012   b->rstartbs = B->rmap->rstart/bs;
3013   b->rendbs   = B->rmap->rend/bs;
3014   b->cstartbs = B->cmap->rstart/bs;
3015   b->cendbs   = B->cmap->rend/bs;
3016 
3017   if (!B->preallocated) {
3018     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3019     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3020     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3021     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
3022     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3023     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3024     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3025     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3026     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3027   }
3028 
3029   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3030   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3031   B->preallocated = PETSC_TRUE;
3032   PetscFunctionReturn(0);
3033 }
3034 
3035 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3036 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3037 
3038 #undef __FUNCT__
3039 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3040 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3041 {
3042   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3043   PetscErrorCode ierr;
3044   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3045   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3046   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3047 
3048   PetscFunctionBegin;
3049   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
3050   ii[0] = 0;
3051   for (i=0; i<M; i++) {
3052     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3053     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3054     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3055     /* remove one from the count if this row of the matrix has a diagonal entry */
3056     for (j=id[i]; j<id[i+1]; j++) {
3057       if (jd[j] == i) {ii[i+1]--;break;}
3058     }
3059   }
3060   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
3061   cnt  = 0;
3062   for (i=0; i<M; i++) {
3063     for (j=io[i]; j<io[i+1]; j++) {
3064       if (garray[jo[j]] > rstart) break;
3065       jj[cnt++] = garray[jo[j]];
3066     }
3067     for (k=id[i]; k<id[i+1]; k++) {
3068       if (jd[k] != i) {
3069         jj[cnt++] = rstart + jd[k];
3070       }
3071     }
3072     for (; j<io[i+1]; j++) {
3073       jj[cnt++] = garray[jo[j]];
3074     }
3075   }
3076   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3077   PetscFunctionReturn(0);
3078 }
3079 
3080 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3081 
3082 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3083 
3084 #undef __FUNCT__
3085 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3086 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3087 {
3088   PetscErrorCode ierr;
3089   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3090   Mat            B;
3091   Mat_MPIAIJ     *b;
3092 
3093   PetscFunctionBegin;
3094   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3095 
3096   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3097   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3098   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3099   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
3100   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3101   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3102   b    = (Mat_MPIAIJ*) B->data;
3103 
3104   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3105   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3106   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3107   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3108   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3109   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3110   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3111   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3112   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3113   if (reuse == MAT_INPLACE_MATRIX) {
3114     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3115   } else {
3116    *newmat = B;
3117   }
3118   PetscFunctionReturn(0);
3119 }
3120 
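/*
   Sketch of how the conversion above is normally reached (through the
   generic MatConvert() interface, to which it is composed below in
   MatCreate_MPIBAIJ(), rather than by calling this routine directly):

     Mat B;
     ierr = MatConvert(A,MATMPIAIJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);

   Passing MAT_INPLACE_MATRIX instead replaces A itself with the converted
   matrix, via the MatHeaderReplace() branch above.
*/
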
3121 /*MC
3122    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3123 
3124    Options Database Keys:
3125 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3126 . -mat_block_size <bs> - set the blocksize used to store the matrix
3127 - -mat_use_hash_table <fact> - use a hash table during assembly to save memory; fact sets the hash table size factor
3128 
3129   Level: beginner
3130 
3131 .seealso: MatCreateBAIJ(), MATSEQBAIJ, MATBAIJ, MatMPIBAIJSetPreallocation()
3132 M*/
3133 
3134 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3135 
3136 #undef __FUNCT__
3137 #define __FUNCT__ "MatCreate_MPIBAIJ"
3138 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3139 {
3140   Mat_MPIBAIJ    *b;
3141   PetscErrorCode ierr;
3142   PetscBool      flg = PETSC_FALSE;
3143 
3144   PetscFunctionBegin;
3145   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3146   B->data = (void*)b;
3147 
3148   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3149   B->assembled = PETSC_FALSE;
3150 
3151   B->insertmode = NOT_SET_VALUES;
3152   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3153   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3154 
3155   /* build local table of row and column ownerships */
3156   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3157 
3158   /* build cache for off array entries formed */
3159   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3160 
3161   b->donotstash  = PETSC_FALSE;
3162   b->colmap      = NULL;
3163   b->garray      = NULL;
3164   b->roworiented = PETSC_TRUE;
3165 
3166   /* stuff used in block assembly */
3167   b->barray = 0;
3168 
3169   /* stuff used for matrix vector multiply */
3170   b->lvec  = 0;
3171   b->Mvctx = 0;
3172 
3173   /* stuff for MatGetRow() */
3174   b->rowindices   = 0;
3175   b->rowvalues    = 0;
3176   b->getrowactive = PETSC_FALSE;
3177 
3178   /* hash table stuff */
3179   b->ht           = 0;
3180   b->hd           = 0;
3181   b->ht_size      = 0;
3182   b->ht_flag      = PETSC_FALSE;
3183   b->ht_fact      = 0;
3184   b->ht_total_ct  = 0;
3185   b->ht_insert_ct = 0;
3186 
3187   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3188   b->ijonly = PETSC_FALSE;
3189 
3190 
3191   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3192   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3193   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3194   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3195   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3196   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
3197   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3198   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3199   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3200   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3201   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3202 
3203   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for creating MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3204   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",flg,&flg,NULL);CHKERRQ(ierr);
3205   if (flg) {
3206     PetscReal fact = 1.39;
3207     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3208     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3209     if (fact <= 1.0) fact = 1.39;
3210     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3211     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3212   }
3213   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3214   PetscFunctionReturn(0);
3215 }
3216 
3217 /*MC
3218    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3219 
3220    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3221    and MATMPIBAIJ otherwise.
3222 
3223    Options Database Keys:
3224 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3225 
3226   Level: beginner
3227 
3228 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3229 M*/
3230 
3231 #undef __FUNCT__
3232 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3233 /*@C
3234    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3235    (block compressed row).  For good matrix assembly performance
3236    the user should preallocate the matrix storage by setting the parameters
3237    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3238    performance can be increased by more than a factor of 50.
3239 
3240    Collective on Mat
3241 
3242    Input Parameters:
3243 +  B - the matrix
3244 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3245           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3246 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3247            submatrix  (same for all local rows)
3248 .  d_nnz - array containing the number of block nonzeros in the various block rows
3249            in the diagonal portion of the local submatrix (possibly different for each block
3250            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3251            set it even if it is zero.
3252 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3253            submatrix (same for all local rows).
3254 -  o_nnz - array containing the number of nonzeros in the various block rows of the
3255            off-diagonal portion of the local submatrix (possibly different for
3256            each block row) or NULL.
3257 
3258    If the *_nnz parameter is given then the *_nz parameter is ignored
3259 
3260    Options Database Keys:
3261 +   -mat_block_size - size of the blocks to use
3262 -   -mat_use_hash_table <fact> - use a hash table during assembly to save memory; fact sets the hash table size factor
3263 
3264    Notes:
3265    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3266    then it must be used on all processors that share the object for that argument.
3267 
3268    Storage Information:
3269    For a square global matrix we define each processor's diagonal portion
3270    to be its local rows and the corresponding columns (a square submatrix);
3271    each processor's off-diagonal portion encompasses the remainder of the
3272    local matrix (a rectangular submatrix).
3273 
3274    The user can specify preallocated storage for the diagonal part of
3275    the local submatrix with either d_nz or d_nnz (not both).  Set
3276    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3277    memory allocation.  Likewise, specify preallocated storage for the
3278    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3279 
3280    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3281    the figure below we depict these three local rows and all columns (0-11).
3282 
3283 .vb
3284            0 1 2 3 4 5 6 7 8 9 10 11
3285           --------------------------
3286    row 3  |o o o d d d o o o o  o  o
3287    row 4  |o o o d d d o o o o  o  o
3288    row 5  |o o o d d d o o o o  o  o
3289           --------------------------
3290 .ve
3291 
3292    Thus, any entries in the d locations are stored in the d (diagonal)
3293    submatrix, and any entries in the o locations are stored in the
3294    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3295    stored simply in the MATSEQBAIJ format for compressed row storage.
3296 
3297    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3298    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3299    In general, for PDE problems in which most nonzeros are near the diagonal,
3300    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3301    or you will get TERRIBLE performance; see the users' manual chapter on
3302    matrices.
3303 
3304    You can call MatGetInfo() to get information on how effective the preallocation was;
3305    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3306    You can also run with the option -info and look for messages with the string
3307    malloc in them to see if additional memory allocation was needed.
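
   As an illustrative sketch tied to the figure above (taking bs=1 for
   simplicity), the process owning rows 3, 4 and 5 could preallocate with

.vb
   ierr = MatMPIBAIJSetPreallocation(B,1,3,NULL,9,NULL);CHKERRQ(ierr);
.ve

   since each of its rows has 3 entries in the diagonal portion and 9 in the
   off-diagonal portion.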
3308 
3309    Level: intermediate
3310 
3311 .keywords: matrix, block, aij, compressed row, sparse, parallel
3312 
3313 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3314 @*/
3315 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3316 {
3317   PetscErrorCode ierr;
3318 
3319   PetscFunctionBegin;
3320   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3321   PetscValidType(B,1);
3322   PetscValidLogicalCollectiveInt(B,bs,2);
3323   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3324   PetscFunctionReturn(0);
3325 }
3326 
3327 #undef __FUNCT__
3328 #define __FUNCT__ "MatCreateBAIJ"
3329 /*@C
3330    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3331    (block compressed row).  For good matrix assembly performance
3332    the user should preallocate the matrix storage by setting the parameters
3333    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3334    performance can be increased by more than a factor of 50.
3335 
3336    Collective on MPI_Comm
3337 
3338    Input Parameters:
3339 +  comm - MPI communicator
3340 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3341           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3342 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3343            This value should be the same as the local size used in creating the
3344            y vector for the matrix-vector product y = Ax.
3345 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3346            This value should be the same as the local size used in creating the
3347            x vector for the matrix-vector product y = Ax.
3348 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3349 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3350 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3351            submatrix  (same for all local rows)
3352 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3353            in the diagonal portion of the local submatrix (possibly different for each block
3354            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3355            and set it even if it is zero.
3356 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3357            submatrix (same for all local rows).
3358 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3359            off-diagonal portion of the local submatrix (possibly different for
3360            each block row) or NULL.
3361 
3362    Output Parameter:
3363 .  A - the matrix
3364 
3365    Options Database Keys:
3366 +   -mat_block_size - size of the blocks to use
3367 -   -mat_use_hash_table <fact> - use a hash table during assembly to save memory; fact sets the hash table size factor
3368 
3369    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3370    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3371    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3372 
3373    Notes:
3374    If the *_nnz parameter is given then the *_nz parameter is ignored
3375 
3376    A nonzero block is any block that has 1 or more nonzeros in it
3377 
3378    The user MUST specify either the local or global matrix dimensions
3379    (possibly both).
3380 
3381    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3382    then it must be used on all processors that share the object for that argument.
3383 
3384    Storage Information:
3385    For a square global matrix we define each processor's diagonal portion
3386    to be its local rows and the corresponding columns (a square submatrix);
3387    each processor's off-diagonal portion encompasses the remainder of the
3388    local matrix (a rectangular submatrix).
3389 
3390    The user can specify preallocated storage for the diagonal part of
3391    the local submatrix with either d_nz or d_nnz (not both).  Set
3392    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3393    memory allocation.  Likewise, specify preallocated storage for the
3394    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3395 
3396    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3397    the figure below we depict these three local rows and all columns (0-11).
3398 
3399 .vb
3400            0 1 2 3 4 5 6 7 8 9 10 11
3401           --------------------------
3402    row 3  |o o o d d d o o o o  o  o
3403    row 4  |o o o d d d o o o o  o  o
3404    row 5  |o o o d d d o o o o  o  o
3405           --------------------------
3406 .ve
3407 
3408    Thus, any entries in the d locations are stored in the d (diagonal)
3409    submatrix, and any entries in the o locations are stored in the
3410    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3411    stored simply in the MATSEQBAIJ format for compressed row storage.
3412 
3413    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3414    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3415    In general, for PDE problems in which most nonzeros are near the diagonal,
3416    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3417    or you will get TERRIBLE performance; see the users' manual chapter on
3418    matrices.
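
   A sketch of a typical call, with comm, bs, m, n, d_nz and o_nz standing in
   for application-chosen values:

.vb
   Mat A;
   ierr = MatCreateBAIJ(comm,bs,m,n,PETSC_DETERMINE,PETSC_DETERMINE,d_nz,NULL,o_nz,NULL,&A);CHKERRQ(ierr);
.ve

   This behaves like MatCreate() followed by MatSetType() (to MATMPIBAIJ or
   MATSEQBAIJ depending on the communicator size) and the matching
   preallocation routine, as the implementation below shows.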
3419 
3420    Level: intermediate
3421 
3422 .keywords: matrix, block, aij, compressed row, sparse, parallel
3423 
3424 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3425 @*/
3426 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3427 {
3428   PetscErrorCode ierr;
3429   PetscMPIInt    size;
3430 
3431   PetscFunctionBegin;
3432   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3433   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3434   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3435   if (size > 1) {
3436     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3437     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3438   } else {
3439     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3440     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3441   }
3442   PetscFunctionReturn(0);
3443 }
3444 
3445 #undef __FUNCT__
3446 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3447 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3448 {
3449   Mat            mat;
3450   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3451   PetscErrorCode ierr;
3452   PetscInt       len=0;
3453 
3454   PetscFunctionBegin;
3455   *newmat = 0;
3456   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3457   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3458   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3459   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3460 
3461   mat->factortype   = matin->factortype;
3462   mat->preallocated = PETSC_TRUE;
3463   mat->assembled    = PETSC_TRUE;
3464   mat->insertmode   = NOT_SET_VALUES;
3465 
3466   a             = (Mat_MPIBAIJ*)mat->data;
3467   mat->rmap->bs = matin->rmap->bs;
3468   a->bs2        = oldmat->bs2;
3469   a->mbs        = oldmat->mbs;
3470   a->nbs        = oldmat->nbs;
3471   a->Mbs        = oldmat->Mbs;
3472   a->Nbs        = oldmat->Nbs;
3473 
3474   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3475   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3476 
3477   a->size         = oldmat->size;
3478   a->rank         = oldmat->rank;
3479   a->donotstash   = oldmat->donotstash;
3480   a->roworiented  = oldmat->roworiented;
3481   a->rowindices   = 0;
3482   a->rowvalues    = 0;
3483   a->getrowactive = PETSC_FALSE;
3484   a->barray       = 0;
3485   a->rstartbs     = oldmat->rstartbs;
3486   a->rendbs       = oldmat->rendbs;
3487   a->cstartbs     = oldmat->cstartbs;
3488   a->cendbs       = oldmat->cendbs;
3489 
3490   /* hash table stuff */
3491   a->ht           = 0;
3492   a->hd           = 0;
3493   a->ht_size      = 0;
3494   a->ht_flag      = oldmat->ht_flag;
3495   a->ht_fact      = oldmat->ht_fact;
3496   a->ht_total_ct  = 0;
3497   a->ht_insert_ct = 0;
3498 
3499   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3500   if (oldmat->colmap) {
3501 #if defined(PETSC_USE_CTABLE)
3502     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3503 #else
3504     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3505     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3506     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3507 #endif
3508   } else a->colmap = 0;
3509 
3510   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3511     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3512     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3513     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3514   } else a->garray = 0;
3515 
3516   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3517   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3518   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3519   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3520   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3521 
3522   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3523   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3524   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3525   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3526   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3527   *newmat = mat;
3528   PetscFunctionReturn(0);
3529 }
3530 
3531 #undef __FUNCT__
3532 #define __FUNCT__ "MatLoad_MPIBAIJ"
3533 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3534 {
3535   PetscErrorCode ierr;
3536   int            fd;
3537   PetscInt       i,nz,j,rstart,rend;
3538   PetscScalar    *vals,*buf;
3539   MPI_Comm       comm;
3540   MPI_Status     status;
3541   PetscMPIInt    rank,size,maxnz;
3542   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3543   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3544   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3545   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3546   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3547   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3548 
3549   PetscFunctionBegin;
3550   /* force binary viewer to load .info file if it has not yet done so */
3551   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3552   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3553   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3554   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3555   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3556   if (bs < 0) bs = 1;
3557 
3558   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3559   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3560   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3561   if (!rank) {
3562     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3563     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
3564   }
3565   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3566   M    = header[1]; N = header[2];
3567 
3568   /* If global sizes are set, check if they are consistent with that given in the file */
3569   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows:Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3570   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols:Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3571 
3572   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3573 
3574   /*
3575      This code adds extra rows to make sure the number of rows is
3576      divisible by the blocksize
3577   */
3578   Mbs        = M/bs;
3579   extra_rows = bs - M + bs*Mbs;
3580   if (extra_rows == bs) extra_rows = 0;
3581   else                  Mbs++;
3582   if (extra_rows && !rank) {
3583     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3584   }
3585 
3586   /* determine ownership of all rows */
3587   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3588     mbs = Mbs/size + ((Mbs % size) > rank);
3589     m   = mbs*bs;
3590   } else { /* User set */
3591     m   = newmat->rmap->n;
3592     mbs = m/bs;
3593   }
3594   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3595   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3596 
3597   /* process 0 needs enough room for process with most rows */
3598   if (!rank) {
3599     mmax = rowners[1];
3600     for (i=2; i<=size; i++) {
3601       mmax = PetscMax(mmax,rowners[i]);
3602     }
3603     mmax*=bs;
3604   } else mmax = -1;             /* unused, but compiler warns anyway */
3605 
3606   rowners[0] = 0;
3607   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3608   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3609   rstart = rowners[rank];
3610   rend   = rowners[rank+1];
3611 
3612   /* distribute row lengths to all processors */
3613   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3614   if (!rank) {
3615     mend = m;
3616     if (size == 1) mend = mend - extra_rows;
3617     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3618     for (j=mend; j<m; j++) locrowlens[j] = 1;
3619     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3620     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3621     for (j=0; j<m; j++) {
3622       procsnz[0] += locrowlens[j];
3623     }
3624     for (i=1; i<size; i++) {
3625       mend = browners[i+1] - browners[i];
3626       if (i == size-1) mend = mend - extra_rows;
3627       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3628       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3629       /* calculate the number of nonzeros on each processor */
3630       for (j=0; j<browners[i+1]-browners[i]; j++) {
3631         procsnz[i] += rowlengths[j];
3632       }
3633       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3634     }
3635     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3636   } else {
3637     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3638   }
3639 
3640   if (!rank) {
3641     /* determine max buffer needed and allocate it */
3642     maxnz = procsnz[0];
3643     for (i=1; i<size; i++) {
3644       maxnz = PetscMax(maxnz,procsnz[i]);
3645     }
3646     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3647 
3648     /* read in my part of the matrix column indices  */
3649     nz     = procsnz[0];
3650     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3651     mycols = ibuf;
3652     if (size == 1) nz -= extra_rows;
3653     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3654     if (size == 1) {
3655       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3656     }
3657 
3658     /* read in everyone else's portion (except the last proc's) and ship it off */
3659     for (i=1; i<size-1; i++) {
3660       nz   = procsnz[i];
3661       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3662       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3663     }
3664     /* read in the stuff for the last proc */
3665     if (size != 1) {
3666       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3667       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3668       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3669       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3670     }
3671     ierr = PetscFree(cols);CHKERRQ(ierr);
3672   } else {
3673     /* determine buffer space needed for message */
3674     nz = 0;
3675     for (i=0; i<m; i++) {
3676       nz += locrowlens[i];
3677     }
3678     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3679     mycols = ibuf;
3680     /* receive message of column indices*/
3681     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3682     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3683     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with the file");
3684   }
3685 
3686   /* loop over local rows, determining number of off diagonal entries */
3687   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3688   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3689   rowcount = 0; nzcount = 0;
3690   for (i=0; i<mbs; i++) {
3691     dcount  = 0;
3692     odcount = 0;
3693     for (j=0; j<bs; j++) {
3694       kmax = locrowlens[rowcount];
3695       for (k=0; k<kmax; k++) {
3696         tmp = mycols[nzcount++]/bs;
3697         if (!mask[tmp]) {
3698           mask[tmp] = 1;
3699           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3700           else masked1[dcount++] = tmp;
3701         }
3702       }
3703       rowcount++;
3704     }
3705 
3706     dlens[i]  = dcount;
3707     odlens[i] = odcount;
3708 
3709     /* zero out the mask elements we set */
3710     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3711     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3712   }
3713 
3714   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3715   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3716 
3717   if (!rank) {
3718     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3719     /* read in my part of the matrix numerical values  */
3720     nz     = procsnz[0];
3721     vals   = buf;
3722     mycols = ibuf;
3723     if (size == 1) nz -= extra_rows;
3724     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3725     if (size == 1) {
3726       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3727     }
3728 
3729     /* insert into matrix */
3730     jj = rstart*bs;
3731     for (i=0; i<m; i++) {
3732       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3733       mycols += locrowlens[i];
3734       vals   += locrowlens[i];
3735       jj++;
3736     }
3737     /* read in other processors (except the last one) and ship out */
3738     for (i=1; i<size-1; i++) {
3739       nz   = procsnz[i];
3740       vals = buf;
3741       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3742       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3743     }
3744     /* the last proc */
3745     if (size != 1) {
3746       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3747       vals = buf;
3748       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3749       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3750       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3751     }
3752     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3753   } else {
3754     /* receive numeric values */
3755     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3756 
3757     /* receive message of values*/
3758     vals   = buf;
3759     mycols = ibuf;
3760     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3761 
3762     /* insert into matrix */
3763     jj = rstart*bs;
3764     for (i=0; i<m; i++) {
3765       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3766       mycols += locrowlens[i];
3767       vals   += locrowlens[i];
3768       jj++;
3769     }
3770   }
3771   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3772   ierr = PetscFree(buf);CHKERRQ(ierr);
3773   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3774   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3775   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3776   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3777   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3778   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3779   PetscFunctionReturn(0);
3780 }
3781 
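/*
   Sketch of how the loader above is typically driven; MatLoad() dispatches
   here for matrices of type MATMPIBAIJ (the file name below is illustrative):

     PetscViewer viewer;
     Mat         A;
     ierr = PetscViewerBinaryOpen(comm,"matrix.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = MatCreate(comm,&A);CHKERRQ(ierr);
     ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
     ierr = MatLoad(A,viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

   The block size may be chosen at load time with -matload_block_size <bs>,
   which is read in the options block at the top of the routine.
*/
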
3782 #undef __FUNCT__
3783 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3784 /*@
3785    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3786 
3787    Not Collective, each process can use a different factor
3788 
3789    Input Parameters:
3790 +  mat  - the matrix
3791 -  fact - factor
3792 
3793    Level: advanced
3794 
3795   Notes:
3796    This can also be set by the command line option: -mat_use_hash_table <fact>
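
   For example (illustrative; 1.6 is an arbitrary factor greater than 1.0):

.vb
   ierr = MatMPIBAIJSetHashTableFactor(mat,1.6);CHKERRQ(ierr);
.ve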
3797 
3798 .keywords: matrix, hashtable, factor, HT
3799 
3800 .seealso: MatSetOption()
3801 @*/
3802 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3803 {
3804   PetscErrorCode ierr;
3805 
3806   PetscFunctionBegin;
3807   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3808   PetscFunctionReturn(0);
3809 }
3810 
3811 #undef __FUNCT__
3812 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3813 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3814 {
3815   Mat_MPIBAIJ *baij;
3816 
3817   PetscFunctionBegin;
3818   baij          = (Mat_MPIBAIJ*)mat->data;
3819   baij->ht_fact = fact;
3820   PetscFunctionReturn(0);
3821 }
3822 
3823 #undef __FUNCT__
3824 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3825 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3826 {
3827   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3828 
3829   PetscFunctionBegin;
3830   if (Ad)     *Ad     = a->A;
3831   if (Ao)     *Ao     = a->B;
3832   if (colmap) *colmap = a->garray;
3833   PetscFunctionReturn(0);
3834 }
3835 
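/*
   Illustrative use of the accessor above: Ad and Ao are the sequential
   diagonal and off-diagonal pieces, and colmap (the garray) maps a local
   block column index of Ao to its global block column index:

     Mat            Ad,Ao;
     const PetscInt *colmap;
     ierr = MatMPIBAIJGetSeqBAIJ(A,&Ad,&Ao,&colmap);CHKERRQ(ierr);
*/
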
3836 /*
3837     Special version for direct calls from Fortran (to eliminate two function call overheads)
3838 */
3839 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3840 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3841 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3842 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3843 #endif
3844 
3845 #undef __FUNCT__
3846 #define __FUNCT__ "matmpibaijsetvaluesblocked"
3847 /*@C
3848   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3849 
3850   Collective on Mat
3851 
3852   Input Parameters:
3853 + mat - the matrix
3854 . min - number of input rows
3855 . im - input rows
3856 . nin - number of input columns
3857 . in - input columns
3858 . v - numerical values input
3859 - addvin - INSERT_VALUES or ADD_VALUES
3860 
3861   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3862   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ(), which is terrible code un-reuse.
3863   Level: advanced
3864 
3865 .seealso:   MatSetValuesBlocked()
3866 @*/
3867 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3868 {
3869   /* convert input arguments to C version */
3870   Mat        mat  = *matin;
3871   PetscInt   m    = *min, n = *nin;
3872   InsertMode addv = *addvin;
3873 
3874   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3875   const MatScalar *value;
3876   MatScalar       *barray     = baij->barray;
3877   PetscBool       roworiented = baij->roworiented;
3878   PetscErrorCode  ierr;
3879   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3880   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3881   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3882 
3883   PetscFunctionBegin;
3884   /* tasks normally handled by MatSetValuesBlocked() */
3885   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3886 #if defined(PETSC_USE_DEBUG)
3887   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3888   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3889 #endif
3890   if (mat->assembled) {
3891     mat->was_assembled = PETSC_TRUE;
3892     mat->assembled     = PETSC_FALSE;
3893   }
3894   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3895 
3896 
3897   if (!barray) {
3898     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3899     baij->barray = barray;
3900   }
3901 
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If only one block column (row-oriented) or one block row (column-oriented) is input, the block is already contiguous in v and no copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
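          /* point value at the first scalar of the bs x bs block inside v, then
             gather the block into the contiguous work array barray */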
          if (roworiented) {
            value = v + i*(stepval+bs)*bs + j*bs;
          } else {
            value = v + j*(stepval+bs)*bs + i*bs;
          }
          for (ii=0; ii<bs; ii++,value+=stepval) {
            for (jj=0; jj<bs; jj++) {
              *barray++ = *value++;
            }
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
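            /* colmap entries are stored shifted by one so that zero can mean
               "not present"; undo the shift and convert to a block column index */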
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
            }
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }

  /* task normally handled by MatSetValuesBlocked() */
  ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

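/*
   Sketch of how the direct binding above is invoked.  It is normally called
   from Fortran; shown here in C with hypothetical variables to make the
   by-reference calling convention explicit (bs = 2, so v holds one 2x2 block
   of values):

     Mat        A;
     PetscInt   m = 1,n = 1,im[1],in[1];
     MatScalar  v[4];
     InsertMode addv = INSERT_VALUES;
     im[0] = 0; in[0] = 0;
     v[0] = 1.0; v[1] = 2.0; v[2] = 3.0; v[3] = 4.0;
     ierr = matmpibaijsetvaluesblocked_(&A,&m,im,&n,in,v,&addv);CHKERRQ(ierr);
*/
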
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIBAIJWithArrays"
/*@
     MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local rows
         in standard CSR format.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  bs - the block size, only a block size of 1 is supported
.  m - number of local rows (Cannot be PETSC_DECIDE)
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       it calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
.   i - row indices
.   j - column indices
-   a - matrix values

   Output Parameter:
.   mat - the matrix

   Level: intermediate

   Notes:
       The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
     thus you CANNOT change the matrix entries by changing the values of a[] after you have
     called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.

     The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
     the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
     block, followed by the second column of the first block, and so on.  That is, the blocks are contiguous in memory
     with column-major ordering within blocks.

       The i and j indices are 0 based; the i array holds the offsets into the local j (and a) arrays, that is, standard CSR row pointers.

.keywords: matrix, baij, block, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
@*/
PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
  if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
  ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
  ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

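/*
   A minimal sketch of the expected input (illustrative; hypothetical variable
   names, bs = 1, and a single process).  The 2x2 matrix [1 2; 0 3] in CSR form:

     PetscInt    ia[3],ja[3];
     PetscScalar va[3];
     Mat         A;
     ia[0] = 0; ia[1] = 2; ia[2] = 3;            (row pointers; ia[0] must be 0)
     ja[0] = 0; ja[1] = 1; ja[2] = 1;            (global column of each nonzero)
     va[0] = 1.0; va[1] = 2.0; va[2] = 3.0;
     ierr = MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,2,2,PETSC_DETERMINE,PETSC_DETERMINE,ia,ja,va,&A);CHKERRQ(ierr);
*/
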
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPIBAIJ"
PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
  PetscInt       *indx;
  PetscScalar    *values;

  PetscFunctionBegin;
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
    Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
    PetscInt       *dnz,*onz,sum,mbs,Nbs;
    PetscInt       *bindx,rmax=a->rmax,j;

    ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
    mbs = m/bs; Nbs = N/cbs;
    if (n == PETSC_DECIDE) {
      ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
    }
    /* Check sum(n) = Nbs */
    ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (sum != Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns %D != global block columns %D",sum,Nbs);

    ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart -= mbs;

    ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
    ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
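    /* Every point row within a block row has the same block structure, so it
       suffices to inspect row i*bs of each block row i and collapse its point
       column indices to block column indices for the preallocation counts */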
    for (i=0; i<mbs; i++) {
      ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
      nnz = nnz/bs;
      for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
      ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
      ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
    }
    ierr = PetscFree(bindx);CHKERRQ(ierr);

    ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
    ierr = MatSetSizes(*outmat,m,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
    ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
    ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  }

  /* numeric phase */
  ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<m; i++) {
    ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
    Ii   = i + rstart;
    ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

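/*
   Sketch of the intended use of the concatenation routine above (hypothetical
   variables): every rank contributes one sequential BAIJ matrix, and the rows
   of the result are the local matrices stacked in rank order:

     Mat seqA,mpiA;
     ierr = MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(PETSC_COMM_WORLD,seqA,PETSC_DECIDE,MAT_INITIAL_MATRIX,&mpiA);CHKERRQ(ierr);
*/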