/* xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision 101da5f51f3cf6340ec2fbcb4e4bcb2c811922e6) */

#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
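
/*
   Usage sketch (illustrative, not part of this file): computing the largest
   magnitude entry of each local row of an assembled MATMPIBAIJ matrix,
   together with its global column index. The helper name RowMaxExample is
   hypothetical.
*/
#if 0
static PetscErrorCode RowMaxExample(Mat A)
{
  PetscErrorCode ierr;
  Vec            v;
  PetscInt       m,*idx;

  ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);  /* v has the row layout of A */
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
  ierr = MatGetRowMaxAbs(A,v,idx);CHKERRQ(ierr);  /* dispatches to MatGetRowMaxAbs_MPIBAIJ */
  ierr = PetscFree(idx);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  return 0;
}
#endif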

#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
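
/*
   Usage sketch (illustrative, not part of this file): the MatStoreValues()/
   MatRetrieveValues() pair saves and restores the numerical values of an
   assembled matrix with a fixed nonzero pattern. StoreRetrieveExample is a
   hypothetical helper name.
*/
#if 0
static PetscErrorCode StoreRetrieveExample(Mat mat)
{
  PetscErrorCode ierr;

  ierr = MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatStoreValues(mat);CHKERRQ(ierr);    /* dispatches to MatStoreValues_MPIBAIJ */
  /* ... overwrite some values, e.g. to impose boundary conditions ... */
  ierr = MatRetrieveValues(mat);CHKERRQ(ierr); /* restore the saved values */
  return 0;
}
#endif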

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way, since the
   length of colmap is proportional to the global number of block columns.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
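
/*
   Illustrative sketch (assumption: the non-CTABLE branch) of how the colmap
   built above is consumed, e.g. in MatSetValues_MPIBAIJ() below: colmap[]
   stores 1 + bs*(local block index) so that 0 can mean "block column absent".
   The helper name ColmapLookup is hypothetical.
*/
#if 0
static PetscInt ColmapLookup(Mat_MPIBAIJ *baij,PetscInt bs,PetscInt gcol)
{
  PetscInt lcol = baij->colmap[gcol/bs] - 1;  /* < 0 if the block column is absent */

  if (lcol < 0) return -1;
  return lcol + (gcol % bs);                  /* local column within baij->B */
}
#endif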

#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
 \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }
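
/*
   The two macros above share one insertion scheme: a short binary search
   narrows the window, a linear scan locates the block column, and on a miss
   the later entries are shifted up to make room. A minimal standalone sketch
   of that scheme on a plain sorted index array (hypothetical helper; it
   assumes spare capacity, which the macros obtain via MatSeqXAIJReallocateAIJ):
*/
#if 0
static PetscErrorCode SortedInsert(PetscInt *cols,PetscInt *nrow,PetscInt bcol)
{
  PetscInt low = 0,high = *nrow,t,i,ii;

  while (high-low > 3) {           /* binary search down to a small window */
    t = (low+high)/2;
    if (cols[t] > bcol) high = t;
    else                low  = t;
  }
  for (i=low; i<high; i++) {       /* linear scan of the remaining window */
    if (cols[i] >= bcol) break;
  }
  if (i < *nrow && cols[i] == bcol) return 0;          /* already present */
  for (ii=*nrow-1; ii>=i; ii--) cols[ii+1] = cols[ii]; /* shift up later entries */
  cols[i] = bcol;
  (*nrow)++;
  return 0;
}
#endif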

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some variables required by the macros above */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off-process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
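
/*
   Usage sketch (illustrative, not part of this file): values are passed with
   global indices; rows owned by another process take the stash path above and
   travel during assembly. The helper name SetValuesExample is hypothetical.
*/
#if 0
static PetscErrorCode SetValuesExample(Mat mat)
{
  PetscErrorCode ierr;
  PetscInt       row = 0,cols[2] = {0,1};
  PetscScalar    vals[2] = {2.0,-1.0};

  ierr = MatSetValues(mat,1,&row,2,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); /* ships stashed off-process entries */
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  return 0;
}
#endif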

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  low = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new block-indexed nonzero block (%D, %D) into the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
/*
    This routine should be optimized so that the block copy at ** Here a copy is required ** below is not needed
    by passing additional stride information into the MatSetValuesBlocked_SeqBAIJ_Inlined() routine
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If n == 1 (row-oriented) or m == 1 (column-oriented) the blocks are already contiguous, so no copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new block-indexed nonzero block (%D, %D) into the matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off-process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
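
/*
   Usage sketch (illustrative): with block size bs = 2, one bs x bs block is
   inserted at block row 0, block column 0. Blocks are read row-oriented by
   default; see MatSetOption(mat,MAT_ROW_ORIENTED,...). BlockInsertExample is
   a hypothetical helper name.
*/
#if 0
static PetscErrorCode BlockInsertExample(Mat mat)
{
  PetscErrorCode ierr;
  PetscInt       brow = 0,bcol = 0;
  PetscScalar    block[4] = { 4.0,-1.0,
                             -1.0, 4.0};  /* bs*bs = 4 entries for bs = 2 */

  ierr = MatSetValuesBlocked(mat,1,&brow,1,&bcol,block,INSERT_VALUES);CHKERRQ(ierr);
  return 0;
}
#endif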

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
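
/*
   A quick illustration of the multiplicative hashing above: the key is scaled
   by the golden-ratio constant and the fractional part selects a slot in
   [0,size). HashSlot is a hypothetical helper for exposition.
*/
#if 0
static PetscInt HashSlot(PetscInt size,PetscInt key)
{
  PetscReal tmp;
  return HASH(size,key,tmp);  /* e.g. HASH(100,7,tmp) yields a slot in [0,100) */
}
#endif
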
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up into the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A hash table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =mat->rmap->rend,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up into the hash table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}
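
/*
   Usage sketch (illustrative): MatGetValues() on a MATMPIBAIJ matrix may only
   query locally owned rows; entries outside the nonzero pattern come back as
   0.0 (see the colmap miss branch above). GetValuesExample is hypothetical.
*/
#if 0
static PetscErrorCode GetValuesExample(Mat mat)
{
  PetscErrorCode ierr;
  PetscInt       row,rstart,rend,col = 0;
  PetscScalar    val;

  ierr = MatGetOwnershipRange(mat,&rstart,&rend);CHKERRQ(ierr);
  row  = rstart;  /* first locally owned row */
  ierr = MatGetValues(mat,1,&row,1,&col,&val);CHKERRQ(ierr);
  return 0;
}
#endif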

#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}
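
/*
   Usage sketch (illustrative): the three supported norms; each performs a
   single reduction over the communicator, as implemented above. NormExample
   is a hypothetical helper name.
*/
#if 0
static PetscErrorCode NormExample(Mat mat)
{
  PetscErrorCode ierr;
  PetscReal      fnorm,onenorm,infnorm;

  ierr = MatNorm(mat,NORM_FROBENIUS,&fnorm);CHKERRQ(ierr);
  ierr = MatNorm(mat,NORM_1,&onenorm);CHKERRQ(ierr);        /* max column sum */
  ierr = MatNorm(mat,NORM_INFINITY,&infnorm);CHKERRQ(ierr); /* max row sum */
  return 0;
}
#endif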

/*
  Creates and fills the hash table.
  This table is created only once.
  If new entries need to be added to the matrix
  then the hash table has to be destroyed and
  recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop Over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average Search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
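
/*
   Usage sketch (illustrative; my assumption is that these are the standard
   entry points for enabling this path): once the hash table is requested, the
   final assembly builds it and redirects MatSetValues() to the _HT variants
   (see MatAssemblyEnd_MPIBAIJ below). The factor oversizes the table relative
   to the number of nonzero blocks to keep probe sequences short.
*/
#if 0
static PetscErrorCode EnableHashTableExample(Mat mat)
{
  PetscErrorCode ierr;

  ierr = MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetHashTableFactor(mat,1.99);CHKERRQ(ierr); /* table size = factor * nonzero blocks */
  return 0;
}
#endif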

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the roworiented flag to column-oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled; if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled, thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
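
/*
   Usage sketch (illustrative): switching between INSERT_VALUES and ADD_VALUES
   requires a flush assembly in between; only the final assembly triggers the
   multiply setup and optional hash table creation above. FlushExample is a
   hypothetical helper name.
*/
#if 0
static PetscErrorCode FlushExample(Mat mat)
{
  PetscErrorCode ierr;
  PetscInt       row = 0,col = 0;
  PetscScalar    v1 = 1.0,v2 = 2.0;

  ierr = MatSetValues(mat,1,&row,1,&col,&v1,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat,MAT_FLUSH_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FLUSH_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatSetValues(mat,1,&row,1,&col,&v2,ADD_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  return 0;
}
#endif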

extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto the first processor. */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so we will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute the number of nonzero blocks in each block row */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. Include them for every row, not just one per block row, since process 0
     cannot reconstruct the per-row indices from block-row information alone. This does require more communication,
     but still no more than the communication needed for the nonzero values */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1351 
1352 #undef __FUNCT__
1353 #define __FUNCT__ "MatDestroy_MPIBAIJ"
1354 PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
1355 {
1356   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1357   PetscErrorCode ierr;
1358 
1359   PetscFunctionBegin;
1360 #if defined(PETSC_USE_LOG)
1361   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
1362 #endif
1363   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
1364   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
1365   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
1366   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
1367 #if defined(PETSC_USE_CTABLE)
1368   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
1369 #else
1370   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
1371 #endif
1372   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
1373   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
1374   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
1375   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
1376   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
1377   ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
1378   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
1379   ierr = PetscFree(mat->data);CHKERRQ(ierr);
1380 
1381   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
1382   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
1383   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
1384   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
1385   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
1386   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
1387   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
1388   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
1389   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 #undef __FUNCT__
1394 #define __FUNCT__ "MatMult_MPIBAIJ"
1395 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1396 {
1397   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1398   PetscErrorCode ierr;
1399   PetscInt       nt;
1400 
1401   PetscFunctionBegin;
1402   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1403   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1404   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1405   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1406   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1407   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1408   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1409   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1410   PetscFunctionReturn(0);
1411 }
1412 
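/*
   MatMult_MPIBAIJ() above overlaps the ghost-value scatter with the
   diagonal-block product. A minimal caller-side sketch (illustrative only;
   the function name is hypothetical):
*/
#if 0
static PetscErrorCode ExampleMatMult(Mat A)
{
  Vec            x,y;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr); /* x conforms to the columns of A, y to the rows */
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = MatMult(A,x,y);CHKERRQ(ierr);         /* y = A*x */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
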
1413 #undef __FUNCT__
1414 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1415 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1416 {
1417   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1418   PetscErrorCode ierr;
1419 
1420   PetscFunctionBegin;
1421   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1422   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1423   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1424   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1425   PetscFunctionReturn(0);
1426 }
1427 
1428 #undef __FUNCT__
1429 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1430 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1431 {
1432   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1433   PetscErrorCode ierr;
1434   PetscBool      merged;
1435 
1436   PetscFunctionBegin;
1437   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1438   /* do nondiagonal part */
1439   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1440   if (!merged) {
1441     /* send it on its way */
1442     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1443     /* do local part */
1444     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1445     /* receive remote parts: note this assumes the values are not actually */
1446     /* inserted in yy until the next line */
1447     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1448   } else {
1449     /* do local part */
1450     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1451     /* send it on its way */
1452     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1453     /* values actually were received in the Begin() but we need to call this nop */
1454     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1455   }
1456   PetscFunctionReturn(0);
1457 }
1458 
1459 #undef __FUNCT__
1460 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1461 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1462 {
1463   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1464   PetscErrorCode ierr;
1465 
1466   PetscFunctionBegin;
1467   /* do nondiagonal part */
1468   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1469   /* send it on its way */
1470   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1471   /* do local part */
1472   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1473   /* receive remote parts: note this assumes the values are not actually */
1474   /* inserted in zz until the VecScatterEnd() below, which is true for the */
1475   /* current implementation but is perhaps not always true. */
1476   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1477   PetscFunctionReturn(0);
1478 }
1479 
1480 /*
1481   This only works correctly for square matrices where the subblock A->A is the
1482    diagonal block
1483 */
1484 #undef __FUNCT__
1485 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1486 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1487 {
1488   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1489   PetscErrorCode ierr;
1490 
1491   PetscFunctionBegin;
1492   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1493   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1494   PetscFunctionReturn(0);
1495 }
1496 
1497 #undef __FUNCT__
1498 #define __FUNCT__ "MatScale_MPIBAIJ"
1499 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1500 {
1501   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1502   PetscErrorCode ierr;
1503 
1504   PetscFunctionBegin;
1505   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1506   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1507   PetscFunctionReturn(0);
1508 }
1509 
1510 #undef __FUNCT__
1511 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1512 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1513 {
1514   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1515   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1516   PetscErrorCode ierr;
1517   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1518   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1519   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1520 
1521   PetscFunctionBegin;
1522   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1523   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1524   mat->getrowactive = PETSC_TRUE;
1525 
1526   if (!mat->rowvalues && (idx || v)) {
1527     /*
1528         allocate enough space to hold information from the longest row.
1529     */
1530     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1531     PetscInt    max = 1,mbs = mat->mbs,tmp;
1532     for (i=0; i<mbs; i++) {
1533       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1534       if (max < tmp) max = tmp;
1535     }
1536     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1537   }
1538   lrow = row - brstart;
1539 
1540   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1541   if (!v)   {pvA = 0; pvB = 0;}
1542   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1543   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1544   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1545   nztot = nzA + nzB;
1546 
1547   cmap = mat->garray;
1548   if (v  || idx) {
1549     if (nztot) {
1550       /* Sort by increasing column numbers, assuming A and B already sorted */
1551       PetscInt imark = -1;
1552       if (v) {
1553         *v = v_p = mat->rowvalues;
1554         for (i=0; i<nzB; i++) {
1555           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1556           else break;
1557         }
1558         imark = i;
1559         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1560         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1561       }
1562       if (idx) {
1563         *idx = idx_p = mat->rowindices;
1564         if (imark > -1) {
1565           for (i=0; i<imark; i++) {
1566             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1567           }
1568         } else {
1569           for (i=0; i<nzB; i++) {
1570             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1571             else break;
1572           }
1573           imark = i;
1574         }
1575         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1576         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1577       }
1578     } else {
1579       if (idx) *idx = 0;
1580       if (v)   *v   = 0;
1581     }
1582   }
1583   *nz  = nztot;
1584   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1585   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1586   PetscFunctionReturn(0);
1587 }
1588 
1589 #undef __FUNCT__
1590 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1591 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1592 {
1593   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1594 
1595   PetscFunctionBegin;
1596   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1597   baij->getrowactive = PETSC_FALSE;
1598   PetscFunctionReturn(0);
1599 }
1600 
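/*
   MatGetRow_MPIBAIJ() only serves locally owned rows and allows one active
   row at a time, so callers loop over the ownership range and restore each
   row before requesting the next. A sketch (illustrative only; the function
   name is hypothetical):
*/
#if 0
static PetscErrorCode ExampleRowTraversal(Mat A)
{
  PetscInt          rstart,rend,row,ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (row=rstart; row<rend; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
    /* ... inspect cols[0..ncols-1] and vals[0..ncols-1] ... */
    ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#endif
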
1601 #undef __FUNCT__
1602 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1603 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1604 {
1605   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1606   PetscErrorCode ierr;
1607 
1608   PetscFunctionBegin;
1609   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1610   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1611   PetscFunctionReturn(0);
1612 }
1613 
1614 #undef __FUNCT__
1615 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1616 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1617 {
1618   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1619   Mat            A  = a->A,B = a->B;
1620   PetscErrorCode ierr;
1621   PetscReal      isend[5],irecv[5];
1622 
1623   PetscFunctionBegin;
1624   info->block_size = (PetscReal)matin->rmap->bs;
1625 
1626   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1627 
1628   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1629   isend[3] = info->memory;  isend[4] = info->mallocs;
1630 
1631   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1632 
1633   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1634   isend[3] += info->memory;  isend[4] += info->mallocs;
1635 
1636   if (flag == MAT_LOCAL) {
1637     info->nz_used      = isend[0];
1638     info->nz_allocated = isend[1];
1639     info->nz_unneeded  = isend[2];
1640     info->memory       = isend[3];
1641     info->mallocs      = isend[4];
1642   } else if (flag == MAT_GLOBAL_MAX) {
1643     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1644 
1645     info->nz_used      = irecv[0];
1646     info->nz_allocated = irecv[1];
1647     info->nz_unneeded  = irecv[2];
1648     info->memory       = irecv[3];
1649     info->mallocs      = irecv[4];
1650   } else if (flag == MAT_GLOBAL_SUM) {
1651     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1652 
1653     info->nz_used      = irecv[0];
1654     info->nz_allocated = irecv[1];
1655     info->nz_unneeded  = irecv[2];
1656     info->memory       = irecv[3];
1657     info->mallocs      = irecv[4];
1658   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1659   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1660   info->fill_ratio_needed = 0;
1661   info->factor_mallocs    = 0;
1662   PetscFunctionReturn(0);
1663 }
1664 
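/*
   A sketch of querying the reductions implemented above (illustrative only;
   the function name is hypothetical). MAT_GLOBAL_SUM sums the isend[]
   entries over all ranks:
*/
#if 0
static PetscErrorCode ExampleMatInfo(Mat A)
{
  MatInfo        info;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject)A),"nz used %g allocated %g\n",(double)info.nz_used,(double)info.nz_allocated);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
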
1665 #undef __FUNCT__
1666 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1667 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1668 {
1669   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1670   PetscErrorCode ierr;
1671 
1672   PetscFunctionBegin;
1673   switch (op) {
1674   case MAT_NEW_NONZERO_LOCATIONS:
1675   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1676   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1677   case MAT_KEEP_NONZERO_PATTERN:
1678   case MAT_NEW_NONZERO_LOCATION_ERR:
1679     MatCheckPreallocated(A,1);
1680     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1681     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1682     break;
1683   case MAT_ROW_ORIENTED:
1684     MatCheckPreallocated(A,1);
1685     a->roworiented = flg;
1686 
1687     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1688     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1689     break;
1690   case MAT_NEW_DIAGONALS:
1691     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1692     break;
1693   case MAT_IGNORE_OFF_PROC_ENTRIES:
1694     a->donotstash = flg;
1695     break;
1696   case MAT_USE_HASH_TABLE:
1697     a->ht_flag = flg;
1698     break;
1699   case MAT_SYMMETRIC:
1700   case MAT_STRUCTURALLY_SYMMETRIC:
1701   case MAT_HERMITIAN:
1702   case MAT_SYMMETRY_ETERNAL:
1703     MatCheckPreallocated(A,1);
1704     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1705     break;
1706   default:
1707     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1708   }
1709   PetscFunctionReturn(0);
1710 }
1711 
1712 #undef __FUNCT__
1713 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1714 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1715 {
1716   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1717   Mat_SeqBAIJ    *Aloc;
1718   Mat            B;
1719   PetscErrorCode ierr;
1720   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1721   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1722   MatScalar      *a;
1723 
1724   PetscFunctionBegin;
1725   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1726   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1727     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1728     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1729     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1730     /* Do not know preallocation information, but must set block size */
1731     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1732   } else {
1733     B = *matout;
1734   }
1735 
1736   /* copy over the A part */
1737   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1738   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1739   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1740 
1741   for (i=0; i<mbs; i++) {
1742     rvals[0] = bs*(baij->rstartbs + i);
1743     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1744     for (j=ai[i]; j<ai[i+1]; j++) {
1745       col = (baij->cstartbs+aj[j])*bs;
1746       for (k=0; k<bs; k++) {
1747         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1748 
1749         col++; a += bs;
1750       }
1751     }
1752   }
1753   /* copy over the B part */
1754   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1755   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1756   for (i=0; i<mbs; i++) {
1757     rvals[0] = bs*(baij->rstartbs + i);
1758     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1759     for (j=ai[i]; j<ai[i+1]; j++) {
1760       col = baij->garray[aj[j]]*bs;
1761       for (k=0; k<bs; k++) {
1762         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1763         col++;
1764         a += bs;
1765       }
1766     }
1767   }
1768   ierr = PetscFree(rvals);CHKERRQ(ierr);
1769   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1770   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1771 
1772   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1773   else {
1774     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1775   }
1776   PetscFunctionReturn(0);
1777 }
1778 
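/*
   MatTranspose_MPIBAIJ() supports both an out-of-place transpose and, for
   square matrices, an in-place one where the output Mat equals the input.
   A sketch (illustrative only; the function name is hypothetical):
*/
#if 0
static PetscErrorCode ExampleTranspose(Mat A)
{
  Mat            At;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); /* new matrix At = A^T */
  ierr = MatDestroy(&At);CHKERRQ(ierr);
  ierr = MatTranspose(A,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);    /* in-place; square matrices only */
  PetscFunctionReturn(0);
}
#endif
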
1779 #undef __FUNCT__
1780 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1781 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1782 {
1783   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1784   Mat            a     = baij->A,b = baij->B;
1785   PetscErrorCode ierr;
1786   PetscInt       s1,s2,s3;
1787 
1788   PetscFunctionBegin;
1789   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1790   if (rr) {
1791     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1792     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1793     /* Overlap communication with computation. */
1794     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1795   }
1796   if (ll) {
1797     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1798     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1799     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1800   }
1801   /* scale the diagonal block */
1802   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1803 
1804   if (rr) {
1805     /* Do a scatter end and then right scale the off-diagonal block */
1806     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1807     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1808   }
1809   PetscFunctionReturn(0);
1810 }
1811 
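/*
   The routine above computes mat = diag(ll)*mat*diag(rr), overlapping the
   scatter of rr with the left scaling. A caller-side sketch (illustrative
   only; the function name is hypothetical):
*/
#if 0
static PetscErrorCode ExampleDiagonalScale(Mat A,Vec l,Vec r)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatDiagonalScale(A,l,r);CHKERRQ(ierr); /* pass NULL for l or r to scale one side only */
  PetscFunctionReturn(0);
}
#endif
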
1812 #undef __FUNCT__
1813 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1814 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1815 {
1816   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1817   PetscInt      *lrows;
1818   PetscInt       r, len;
1819   PetscErrorCode ierr;
1820 
1821   PetscFunctionBegin;
1822   /* get locally owned rows */
1823   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1824   /* fix right hand side if needed */
1825   if (x && b) {
1826     const PetscScalar *xx;
1827     PetscScalar       *bb;
1828 
1829     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1830     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1831     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1832     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1833     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1834   }
1835 
1836   /* actually zap the local rows */
1837   /*
1838         Zero the required rows. If the "diagonal block" of the matrix
1839      is square and the user wishes to set the diagonal, we use separate
1840      code so that MatSetValues() is not called for each diagonal entry;
1841      that would allocate new memory for every insertion, triggering many
1842      mallocs and slowing things down.
1843   */
1844   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1845   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1846   if (A->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
1847     PetscBool cong;
1848     ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
1849     if (cong) A->congruentlayouts = 1;
1850     else      A->congruentlayouts = 0;
1851   }
1852   if ((diag != 0.0) && A->congruentlayouts) {
1853     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1854   } else if (diag != 0.0) {
1855     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,0,0);CHKERRQ(ierr);
1856     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1857        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1858     for (r = 0; r < len; ++r) {
1859       const PetscInt row = lrows[r] + A->rmap->rstart;
1860       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1861     }
1862     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1863     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1864   } else {
1865     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1866   }
1867   ierr = PetscFree(lrows);CHKERRQ(ierr);
1868 
1869   /* only change matrix nonzero state if pattern was allowed to be changed */
1870   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1871     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1872     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1873   }
1874   PetscFunctionReturn(0);
1875 }
1876 
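/*
   A typical Dirichlet boundary-condition use of the routine above: zero the
   listed global rows, place diag on the diagonal, and adjust b so those rows
   solve to the values in x. A sketch (illustrative only; the function name
   and row choices are hypothetical):
*/
#if 0
static PetscErrorCode ExampleZeroDirichletRows(Mat A,Vec x,Vec b)
{
  PetscInt       rows[2] = {0,1}; /* global row numbers to zero */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRows(A,2,rows,1.0,x,b);CHKERRQ(ierr); /* rows zeroed, 1.0 on diagonal, b[row] = x[row] */
  PetscFunctionReturn(0);
}
#endif
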
1877 #undef __FUNCT__
1878 #define __FUNCT__ "MatZeroRowsColumns_MPIBAIJ"
1879 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1880 {
1881   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1882   PetscErrorCode    ierr;
1883   PetscMPIInt       n = A->rmap->n;
1884   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1885   PetscInt          *lrows,*owners = A->rmap->range;
1886   PetscSFNode       *rrows;
1887   PetscSF           sf;
1888   const PetscScalar *xx;
1889   PetscScalar       *bb,*mask;
1890   Vec               xmask,lmask;
1891   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1892   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1893   PetscScalar       *aa;
1894 
1895   PetscFunctionBegin;
1896   /* Create SF where leaves are input rows and roots are owned rows */
1897   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1898   for (r = 0; r < n; ++r) lrows[r] = -1;
1899   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1900   for (r = 0; r < N; ++r) {
1901     const PetscInt idx   = rows[r];
1902     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1903     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1904       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1905     }
1906     rrows[r].rank  = p;
1907     rrows[r].index = rows[r] - owners[p];
1908   }
1909   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1910   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1911   /* Collect flags for rows to be zeroed */
1912   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1913   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1914   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1915   /* Compress and put in row numbers */
1916   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1917   /* zero diagonal part of matrix */
1918   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1919   /* handle off diagonal part of matrix */
1920   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1921   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1922   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1923   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1924   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1925   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1926   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1927   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1928   if (x) {
1929     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1930     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1931     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1932     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1933   }
1934   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1935   /* remove zeroed rows of off diagonal matrix */
1936   for (i = 0; i < len; ++i) {
1937     row   = lrows[i];
1938     count = (baij->i[row/bs +1] - baij->i[row/bs])*bs;
1939     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1940     for (k = 0; k < count; ++k) {
1941       aa[0] = 0.0;
1942       aa   += bs;
1943     }
1944   }
1945   /* loop over all elements of the off-process part of the matrix, zeroing removed columns */
1946   for (i = 0; i < l->B->rmap->N; ++i) {
1947     row = i/bs;
1948     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1949       for (k = 0; k < bs; ++k) {
1950         col = bs*baij->j[j] + k;
1951         if (PetscAbsScalar(mask[col])) {
1952           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1953           if (x) bb[i] -= aa[0]*xx[col];
1954           aa[0] = 0.0;
1955         }
1956       }
1957     }
1958   }
1959   if (x) {
1960     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1961     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1962   }
1963   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1964   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1965   ierr = PetscFree(lrows);CHKERRQ(ierr);
1966 
1967   /* only change matrix nonzero state if pattern was allowed to be changed */
1968   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1969     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1970     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1971   }
1972   PetscFunctionReturn(0);
1973 }
1974 
1975 #undef __FUNCT__
1976 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
1977 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1978 {
1979   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1980   PetscErrorCode ierr;
1981 
1982   PetscFunctionBegin;
1983   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1984   PetscFunctionReturn(0);
1985 }
1986 
1987 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
1988 
1989 #undef __FUNCT__
1990 #define __FUNCT__ "MatEqual_MPIBAIJ"
1991 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1992 {
1993   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1994   Mat            a,b,c,d;
1995   PetscBool      flg;
1996   PetscErrorCode ierr;
1997 
1998   PetscFunctionBegin;
1999   a = matA->A; b = matA->B;
2000   c = matB->A; d = matB->B;
2001 
2002   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2003   if (flg) {
2004     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2005   }
2006   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2007   PetscFunctionReturn(0);
2008 }
2009 
2010 #undef __FUNCT__
2011 #define __FUNCT__ "MatCopy_MPIBAIJ"
2012 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
2013 {
2014   PetscErrorCode ierr;
2015   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2016   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2017 
2018   PetscFunctionBegin;
2019   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
2020   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2021     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2022   } else {
2023     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2024     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2025   }
2026   PetscFunctionReturn(0);
2027 }
2028 
2029 #undef __FUNCT__
2030 #define __FUNCT__ "MatSetUp_MPIBAIJ"
2031 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
2032 {
2033   PetscErrorCode ierr;
2034 
2035   PetscFunctionBegin;
2036   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
2037   PetscFunctionReturn(0);
2038 }
2039 
2040 #undef __FUNCT__
2041 #define __FUNCT__ "MatAXPYGetPreallocation_MPIBAIJ"
2042 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
2043 {
2044   PetscErrorCode ierr;
2045   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
2046   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
2047   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
2048 
2049   PetscFunctionBegin;
2050   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
2051   PetscFunctionReturn(0);
2052 }
2053 
2054 #undef __FUNCT__
2055 #define __FUNCT__ "MatAXPY_MPIBAIJ"
2056 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2057 {
2058   PetscErrorCode ierr;
2059   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
2060   PetscBLASInt   bnz,one=1;
2061   Mat_SeqBAIJ    *x,*y;
2062 
2063   PetscFunctionBegin;
2064   if (str == SAME_NONZERO_PATTERN) {
2065     PetscScalar alpha = a;
2066     x    = (Mat_SeqBAIJ*)xx->A->data;
2067     y    = (Mat_SeqBAIJ*)yy->A->data;
2068     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2069     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2070     x    = (Mat_SeqBAIJ*)xx->B->data;
2071     y    = (Mat_SeqBAIJ*)yy->B->data;
2072     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2073     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2074     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2075   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2076     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2077   } else {
2078     Mat      B;
2079     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2080     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2081     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2082     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2083     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2084     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2085     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2086     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2087     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2088     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2089     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2090     /* MatAXPY_BasicWithPreallocation() for BAIJ matrices is much slower than for AIJ, even for bs=1! */
2091     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2092     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2093     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2094     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2095   }
2096   PetscFunctionReturn(0);
2097 }
2098 
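/*
   A sketch of the fast path above (illustrative only; the function name is
   hypothetical). With SAME_NONZERO_PATTERN the update reduces to two
   BLASaxpy() calls on the stored values:
*/
#if 0
static PetscErrorCode ExampleAXPY(Mat Y,Mat X)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr); /* Y = Y + 2*X */
  PetscFunctionReturn(0);
}
#endif
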
2099 #undef __FUNCT__
2100 #define __FUNCT__ "MatRealPart_MPIBAIJ"
2101 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2102 {
2103   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2104   PetscErrorCode ierr;
2105 
2106   PetscFunctionBegin;
2107   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2108   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2109   PetscFunctionReturn(0);
2110 }
2111 
2112 #undef __FUNCT__
2113 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2114 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2115 {
2116   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2117   PetscErrorCode ierr;
2118 
2119   PetscFunctionBegin;
2120   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2121   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2122   PetscFunctionReturn(0);
2123 }
2124 
2125 #undef __FUNCT__
2126 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2127 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2128 {
2129   PetscErrorCode ierr;
2130   IS             iscol_local;
2131   PetscInt       csize;
2132 
2133   PetscFunctionBegin;
2134   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2135   if (call == MAT_REUSE_MATRIX) {
2136     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2137     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2138   } else {
2139     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2140   }
2141   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2142   if (call == MAT_INITIAL_MATRIX) {
2143     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2144     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2145   }
2146   PetscFunctionReturn(0);
2147 }
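
/*
   The "ISAllGather" composition above enables the reuse pattern sketched
   here (illustrative only; the function name is hypothetical):
*/
#if 0
static PetscErrorCode ExampleSubMatrixReuse(Mat A,IS isrow,IS iscol)
{
  Mat            S;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&S);CHKERRQ(ierr);
  /* ... update the numerical values of A, keeping its nonzero pattern ... */
  ierr = MatGetSubMatrix(A,isrow,iscol,MAT_REUSE_MATRIX,&S);CHKERRQ(ierr); /* refills S without reallocation */
  ierr = MatDestroy(&S);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
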
2148 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2149 #undef __FUNCT__
2150 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2151 /*
2152   Not great since it makes two copies of the submatrix: first a local SeqBAIJ,
2153   and then the end result built by concatenating the local matrices.
2154   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2155   This routine is used for BAIJ and SBAIJ matrices (unfortunate dependency).
2156 */
2157 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2158 {
2159   PetscErrorCode ierr;
2160   PetscMPIInt    rank,size;
2161   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2162   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2163   Mat            M,Mreuse;
2164   MatScalar      *vwork,*aa;
2165   MPI_Comm       comm;
2166   IS             isrow_new, iscol_new;
2167   PetscBool      idflag,allrows, allcols;
2168   Mat_SeqBAIJ    *aij;
2169 
2170   PetscFunctionBegin;
2171   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2172   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2173   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2174   /* The compression and expansion should be avoided. It does not flag
2175      errors and might change the indices, hence buggy */
2176   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2177   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2178 
2179   /* Check for special case: each processor gets entire matrix columns */
2180   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2181   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2182   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2183   else allcols = PETSC_FALSE;
2184 
2185   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2186   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2187   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2188   else allrows = PETSC_FALSE;
2189 
2190   if (call ==  MAT_REUSE_MATRIX) {
2191     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2192     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2193     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2194   } else {
2195     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2196   }
2197   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2198   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2199   /*
2200       m - number of local rows
2201       n - number of columns (same on all processors)
2202       rstart - first row in new global matrix generated
2203   */
2204   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2205   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2206   m    = m/bs;
2207   n    = n/bs;
2208 
2209   if (call == MAT_INITIAL_MATRIX) {
2210     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2211     ii  = aij->i;
2212     jj  = aij->j;
2213 
2214     /*
2215         Determine the number of non-zeros in the diagonal and off-diagonal
2216         portions of the matrix in order to do correct preallocation
2217     */
2218 
2219     /* first get start and end of "diagonal" columns */
2220     if (csize == PETSC_DECIDE) {
2221       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2222       if (mglobal == n*bs) { /* square matrix */
2223         nlocal = m;
2224       } else {
2225         nlocal = n/size + ((n % size) > rank);
2226       }
2227     } else {
2228       nlocal = csize/bs;
2229     }
2230     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2231     rstart = rend - nlocal;
2232     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2233 
2234     /* next, compute all the lengths */
2235     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2236     for (i=0; i<m; i++) {
2237       jend = ii[i+1] - ii[i];
2238       olen = 0;
2239       dlen = 0;
2240       for (j=0; j<jend; j++) {
2241         if (*jj < rstart || *jj >= rend) olen++;
2242         else dlen++;
2243         jj++;
2244       }
2245       olens[i] = olen;
2246       dlens[i] = dlen;
2247     }
2248     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2249     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2250     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2251     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2252     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2253     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2254   } else {
2255     PetscInt ml,nl;
2256 
2257     M    = *newmat;
2258     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2259     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2260     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2261     /*
2262          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2263        rather than the slower MatSetValues().
2264     */
2265     M->was_assembled = PETSC_TRUE;
2266     M->assembled     = PETSC_FALSE;
2267   }
2268   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2269   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2270   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2271   ii   = aij->i;
2272   jj   = aij->j;
2273   aa   = aij->a;
2274   for (i=0; i<m; i++) {
2275     row   = rstart/bs + i;
2276     nz    = ii[i+1] - ii[i];
2277     cwork = jj;     jj += nz;
2278     vwork = aa;     aa += nz*bs*bs;
2279     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2280   }
2281 
2282   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2283   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2284   *newmat = M;
2285 
2286   /* save submatrix used in processor for next request */
2287   if (call ==  MAT_INITIAL_MATRIX) {
2288     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2289     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2290   }
2291   PetscFunctionReturn(0);
2292 }
2293 
2294 #undef __FUNCT__
2295 #define __FUNCT__ "MatPermute_MPIBAIJ"
2296 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2297 {
2298   MPI_Comm       comm,pcomm;
2299   PetscInt       clocal_size,nrows;
2300   const PetscInt *rows;
2301   PetscMPIInt    size;
2302   IS             crowp,lcolp;
2303   PetscErrorCode ierr;
2304 
2305   PetscFunctionBegin;
2306   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2307   /* make a collective version of 'rowp' */
2308   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2309   if (pcomm==comm) {
2310     crowp = rowp;
2311   } else {
2312     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2313     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2314     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2315     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2316   }
2317   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2318   /* make a local version of 'colp' */
2319   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2320   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2321   if (size==1) {
2322     lcolp = colp;
2323   } else {
2324     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2325   }
2326   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2327   /* now we just get the submatrix */
2328   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2329   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2330   /* clean up */
2331   if (pcomm!=comm) {
2332     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2333   }
2334   if (size>1) {
2335     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2336   }
2337   PetscFunctionReturn(0);
2338 }
2339 
2340 #undef __FUNCT__
2341 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2342 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2343 {
2344   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2345   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2346 
2347   PetscFunctionBegin;
2348   if (nghosts) *nghosts = B->nbs;
2349   if (ghosts) *ghosts = baij->garray;
2350   PetscFunctionReturn(0);
2351 }
2352 
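/*
   The ghosts returned above are the block-column indices in garray, i.e.
   the off-process blocks this rank references. A sketch (illustrative only;
   the function name is hypothetical):
*/
#if 0
static PetscErrorCode ExampleGhosts(Mat A)
{
  PetscInt       nghosts,i;
  const PetscInt *ghosts;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetGhosts(A,&nghosts,&ghosts);CHKERRQ(ierr);
  for (i=0; i<nghosts; i++) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"local ghost block %D is global block %D\n",i,ghosts[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#endif
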
2353 #undef __FUNCT__
2354 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2355 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2356 {
2357   Mat            B;
2358   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2359   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2360   Mat_SeqAIJ     *b;
2361   PetscErrorCode ierr;
2362   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2363   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2364   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2365 
2366   PetscFunctionBegin;
2367   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2368   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2369 
2370   /* ----------------------------------------------------------------
2371      Tell every processor the number of nonzeros per row
2372   */
2373   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2374   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2375     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2376   }
2377   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2378   displs    = recvcounts + size;
2379   for (i=0; i<size; i++) {
2380     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2381     displs[i]     = A->rmap->range[i]/bs;
2382   }
2383 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2384   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2385 #else
2386   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2387   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2388 #endif
2389   /* ---------------------------------------------------------------
2390      Create the sequential matrix of the same type as the local block diagonal
2391   */
2392   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2393   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2394   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2395   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2396   b    = (Mat_SeqAIJ*)B->data;
2397 
2398   /*--------------------------------------------------------------------
2399     Copy my part of matrix column indices over
2400   */
2401   sendcount  = ad->nz + bd->nz;
2402   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2403   a_jsendbuf = ad->j;
2404   b_jsendbuf = bd->j;
2405   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2406   cnt        = 0;
2407   for (i=0; i<n; i++) {
2408 
2409     /* put in lower diagonal portion */
2410     m = bd->i[i+1] - bd->i[i];
2411     while (m > 0) {
2412       /* is it above diagonal (in bd (compressed) numbering) */
2413       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2414       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2415       m--;
2416     }
2417 
2418     /* put in diagonal portion */
2419     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2420       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2421     }
2422 
2423     /* put in upper diagonal portion */
2424     while (m-- > 0) {
2425       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2426     }
2427   }
2428   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2429 
2430   /*--------------------------------------------------------------------
2431     Gather all column indices to all processors
2432   */
2433   for (i=0; i<size; i++) {
2434     recvcounts[i] = 0;
2435     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2436       recvcounts[i] += lens[j];
2437     }
2438   }
2439   displs[0] = 0;
2440   for (i=1; i<size; i++) {
2441     displs[i] = displs[i-1] + recvcounts[i-1];
2442   }
2443 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2444   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2445 #else
2446   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2447 #endif
2448   /*--------------------------------------------------------------------
2449     Assemble the matrix into usable form (note the numerical values are not yet set)
2450   */
2451   /* set the b->ilen (length of each row) values */
2452   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2453   /* set the b->i indices */
2454   b->i[0] = 0;
2455   for (i=1; i<=A->rmap->N/bs; i++) {
2456     b->i[i] = b->i[i-1] + lens[i-1];
2457   }
2458   ierr = PetscFree(lens);CHKERRQ(ierr);
2459   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2460   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2461   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2462 
2463   if (A->symmetric) {
2464     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2465   } else if (A->hermitian) {
2466     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2467   } else if (A->structurally_symmetric) {
2468     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2469   }
2470   *newmat = B;
2471   PetscFunctionReturn(0);
2472 }
2473 
2474 #undef __FUNCT__
2475 #define __FUNCT__ "MatSOR_MPIBAIJ"
2476 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2477 {
2478   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2479   PetscErrorCode ierr;
2480   Vec            bb1 = 0;
2481 
2482   PetscFunctionBegin;
2483   if (flag == SOR_APPLY_UPPER) {
2484     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2485     PetscFunctionReturn(0);
2486   }
2487 
2488   if (its > 1 || (~flag & SOR_ZERO_INITIAL_GUESS)) {
2489     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2490   }
2491 
2492   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2493     if (flag & SOR_ZERO_INITIAL_GUESS) {
2494       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2495       its--;
2496     }
2497 
2498     while (its--) {
2499       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2500       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2501 
2502       /* update rhs: bb1 = bb - B*x */
2503       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2504       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2505 
2506       /* local sweep */
2507       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2508     }
2509   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2510     if (flag & SOR_ZERO_INITIAL_GUESS) {
2511       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2512       its--;
2513     }
2514     while (its--) {
2515       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2516       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2517 
2518       /* update rhs: bb1 = bb - B*x */
2519       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2520       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2521 
2522       /* local sweep */
2523       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2524     }
2525   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2526     if (flag & SOR_ZERO_INITIAL_GUESS) {
2527       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2528       its--;
2529     }
2530     while (its--) {
2531       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2532       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2533 
2534       /* update rhs: bb1 = bb - B*x */
2535       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2536       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2537 
2538       /* local sweep */
2539       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2540     }
2541   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Requested parallel SOR variant not supported");
2542 
2543   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2544   PetscFunctionReturn(0);
2545 }
2546 
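/*
   Each outer iteration above updates the right-hand side with the current
   ghost values, bb1 = bb - B*x, then runs lits local sweeps on the diagonal
   block. A caller-side sketch (illustrative only; the function name and
   parameter choices are hypothetical):
*/
#if 0
static PetscErrorCode ExampleLocalSOR(Mat A,Vec b,Vec x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* two outer iterations, one local symmetric sweep each, omega = 1.0, x initially zeroed */
  ierr = MatSOR(A,b,1.0,(MatSORType)(SOR_LOCAL_SYMMETRIC_SWEEP | SOR_ZERO_INITIAL_GUESS),0.0,2,1,x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
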
2547 #undef __FUNCT__
2548 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2549 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2550 {
2551   PetscErrorCode ierr;
2552   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2553   PetscInt       N,i,*garray = aij->garray;
2554   PetscInt       ib,jb,bs = A->rmap->bs;
2555   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2556   MatScalar      *a_val = a_aij->a;
2557   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2558   MatScalar      *b_val = b_aij->a;
2559   PetscReal      *work;
2560 
2561   PetscFunctionBegin;
2562   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2563   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2564   if (type == NORM_2) {
2565     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2566       for (jb=0; jb<bs; jb++) {
2567         for (ib=0; ib<bs; ib++) {
2568           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2569           a_val++;
2570         }
2571       }
2572     }
2573     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2574       for (jb=0; jb<bs; jb++) {
2575         for (ib=0; ib<bs; ib++) {
2576           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2577           b_val++;
2578         }
2579       }
2580     }
2581   } else if (type == NORM_1) {
2582     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2583       for (jb=0; jb<bs; jb++) {
2584         for (ib=0; ib<bs; ib++) {
2585           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2586           a_val++;
2587         }
2588       }
2589     }
2590     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2591       for (jb=0; jb<bs; jb++) {
2592         for (ib=0; ib<bs; ib++) {
2593           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2594           b_val++;
2595         }
2596       }
2597     }
2598   } else if (type == NORM_INFINITY) {
2599     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2600       for (jb=0; jb<bs; jb++) {
2601         for (ib=0; ib<bs; ib++) {
2602           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2603           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2604           a_val++;
2605         }
2606       }
2607     }
2608     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2609       for (jb=0; jb<bs; jb++) {
2610         for (ib=0; ib<bs; ib++) {
2611           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2612           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2613           b_val++;
2614         }
2615       }
2616     }
2617   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2618   if (type == NORM_INFINITY) {
2619     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2620   } else {
2621     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2622   }
2623   ierr = PetscFree(work);CHKERRQ(ierr);
2624   if (type == NORM_2) {
2625     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2626   }
2627   PetscFunctionReturn(0);
2628 }
2629 
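/*
   A sketch of calling the routine above through the public interface
   (illustrative only; the function name is hypothetical):
*/
#if 0
static PetscErrorCode ExampleColumnNorms(Mat A)
{
  PetscInt       N;
  PetscReal      *norms;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = PetscMalloc1(N,&norms);CHKERRQ(ierr);            /* one norm per global column */
  ierr = MatGetColumnNorms(A,NORM_2,norms);CHKERRQ(ierr);
  ierr = PetscFree(norms);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
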
2630 #undef __FUNCT__
2631 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2632 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2633 {
2634   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2635   PetscErrorCode ierr;
2636 
2637   PetscFunctionBegin;
2638   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2639   A->errortype = a->A->errortype;
2640   PetscFunctionReturn(0);
2641 }
2642 
2643 #undef __FUNCT__
2644 #define __FUNCT__ "MatShift_MPIBAIJ"
2645 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2646 {
2647   PetscErrorCode ierr;
2648   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2649   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2650 
2651   PetscFunctionBegin;
2652   if (!Y->preallocated) {
2653     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2654   } else if (!aij->nz) {
2655     PetscInt nonew = aij->nonew;
2656     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2657     aij->nonew = nonew;
2658   }
2659   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2660   PetscFunctionReturn(0);
2661 }
2662 
2663 #undef __FUNCT__
2664 #define __FUNCT__ "MatMissingDiagonal_MPIBAIJ"
2665 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2666 {
2667   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2668   PetscErrorCode ierr;
2669 
2670   PetscFunctionBegin;
2671   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2672   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2673   if (d) {
2674     PetscInt rstart;
2675     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2676     *d += rstart/A->rmap->bs;
2677 
2678   }
2679   PetscFunctionReturn(0);
2680 }
2681 
2682 #undef __FUNCT__
2683 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2684 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2685 {
2686   PetscFunctionBegin;
2687   *a = ((Mat_MPIBAIJ*)A->data)->A;
2688   PetscFunctionReturn(0);
2689 }
2690 
2691 /* -------------------------------------------------------------------*/
2692 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2693                                        MatGetRow_MPIBAIJ,
2694                                        MatRestoreRow_MPIBAIJ,
2695                                        MatMult_MPIBAIJ,
2696                                 /* 4*/ MatMultAdd_MPIBAIJ,
2697                                        MatMultTranspose_MPIBAIJ,
2698                                        MatMultTransposeAdd_MPIBAIJ,
2699                                        0,
2700                                        0,
2701                                        0,
2702                                 /*10*/ 0,
2703                                        0,
2704                                        0,
2705                                        MatSOR_MPIBAIJ,
2706                                        MatTranspose_MPIBAIJ,
2707                                 /*15*/ MatGetInfo_MPIBAIJ,
2708                                        MatEqual_MPIBAIJ,
2709                                        MatGetDiagonal_MPIBAIJ,
2710                                        MatDiagonalScale_MPIBAIJ,
2711                                        MatNorm_MPIBAIJ,
2712                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2713                                        MatAssemblyEnd_MPIBAIJ,
2714                                        MatSetOption_MPIBAIJ,
2715                                        MatZeroEntries_MPIBAIJ,
2716                                 /*24*/ MatZeroRows_MPIBAIJ,
2717                                        0,
2718                                        0,
2719                                        0,
2720                                        0,
2721                                 /*29*/ MatSetUp_MPIBAIJ,
2722                                        0,
2723                                        0,
2724                                        MatGetDiagonalBlock_MPIBAIJ,
2725                                        0,
2726                                 /*34*/ MatDuplicate_MPIBAIJ,
2727                                        0,
2728                                        0,
2729                                        0,
2730                                        0,
2731                                 /*39*/ MatAXPY_MPIBAIJ,
2732                                        MatGetSubMatrices_MPIBAIJ,
2733                                        MatIncreaseOverlap_MPIBAIJ,
2734                                        MatGetValues_MPIBAIJ,
2735                                        MatCopy_MPIBAIJ,
2736                                 /*44*/ 0,
2737                                        MatScale_MPIBAIJ,
2738                                        MatShift_MPIBAIJ,
2739                                        0,
2740                                        MatZeroRowsColumns_MPIBAIJ,
2741                                 /*49*/ 0,
2742                                        0,
2743                                        0,
2744                                        0,
2745                                        0,
2746                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2747                                        0,
2748                                        MatSetUnfactored_MPIBAIJ,
2749                                        MatPermute_MPIBAIJ,
2750                                        MatSetValuesBlocked_MPIBAIJ,
2751                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2752                                        MatDestroy_MPIBAIJ,
2753                                        MatView_MPIBAIJ,
2754                                        0,
2755                                        0,
2756                                 /*64*/ 0,
2757                                        0,
2758                                        0,
2759                                        0,
2760                                        0,
2761                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2762                                        0,
2763                                        0,
2764                                        0,
2765                                        0,
2766                                 /*74*/ 0,
2767                                        MatFDColoringApply_BAIJ,
2768                                        0,
2769                                        0,
2770                                        0,
2771                                 /*79*/ 0,
2772                                        0,
2773                                        0,
2774                                        0,
2775                                        MatLoad_MPIBAIJ,
2776                                 /*84*/ 0,
2777                                        0,
2778                                        0,
2779                                        0,
2780                                        0,
2781                                 /*89*/ 0,
2782                                        0,
2783                                        0,
2784                                        0,
2785                                        0,
2786                                 /*94*/ 0,
2787                                        0,
2788                                        0,
2789                                        0,
2790                                        0,
2791                                 /*99*/ 0,
2792                                        0,
2793                                        0,
2794                                        0,
2795                                        0,
2796                                 /*104*/0,
2797                                        MatRealPart_MPIBAIJ,
2798                                        MatImaginaryPart_MPIBAIJ,
2799                                        0,
2800                                        0,
2801                                 /*109*/0,
2802                                        0,
2803                                        0,
2804                                        0,
2805                                        MatMissingDiagonal_MPIBAIJ,
2806                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2807                                        0,
2808                                        MatGetGhosts_MPIBAIJ,
2809                                        0,
2810                                        0,
2811                                 /*119*/0,
2812                                        0,
2813                                        0,
2814                                        0,
2815                                        MatGetMultiProcBlock_MPIBAIJ,
2816                                 /*124*/0,
2817                                        MatGetColumnNorms_MPIBAIJ,
2818                                        MatInvertBlockDiagonal_MPIBAIJ,
2819                                        0,
2820                                        0,
2821                                /*129*/ 0,
2822                                        0,
2823                                        0,
2824                                        0,
2825                                        0,
2826                                /*134*/ 0,
2827                                        0,
2828                                        0,
2829                                        0,
2830                                        0,
2831                                /*139*/ 0,
2832                                        0,
2833                                        0,
2834                                        MatFDColoringSetUp_MPIXAIJ,
2835                                        0,
2836                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2837 };
2838 
2839 
2840 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2841 
2842 #undef __FUNCT__
2843 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2844 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2845 {
2846   PetscInt       m,rstart,cstart,cend;
2847   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2848   const PetscInt *JJ    =0;
2849   PetscScalar    *values=0;
2850   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2851   PetscErrorCode ierr;
2852 
2853   PetscFunctionBegin;
2854   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2855   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2856   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2857   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2858   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2859   m      = B->rmap->n/bs;
2860   rstart = B->rmap->rstart/bs;
2861   cstart = B->cmap->rstart/bs;
2862   cend   = B->cmap->rend/bs;
2863 
2864   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2865   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
2866   for (i=0; i<m; i++) {
2867     nz = ii[i+1] - ii[i];
2868     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2869     nz_max = PetscMax(nz_max,nz);
2870     JJ     = jj + ii[i];
2871     for (j=0; j<nz; j++) {
2872       if (*JJ >= cstart) break;
2873       JJ++;
2874     }
2875     d = 0;
2876     for (; j<nz; j++) {
2877       if (*JJ++ >= cend) break;
2878       d++;
2879     }
2880     d_nnz[i] = d;
2881     o_nnz[i] = nz - d;
2882   }
2883   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2884   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2885 
2886   values = (PetscScalar*)V;
2887   if (!values) {
2888     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2889     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2890   }
2891   for (i=0; i<m; i++) {
2892     PetscInt          row    = i + rstart;
2893     PetscInt          ncols  = ii[i+1] - ii[i];
2894     const PetscInt    *icols = jj + ii[i];
2895     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2896       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2897       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2898     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2899       PetscInt j;
2900       for (j=0; j<ncols; j++) {
2901         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2902         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2903       }
2904     }
2905   }
2906 
2907   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2908   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2909   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2910   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2911   PetscFunctionReturn(0);
2912 }
2913 
2914 #undef __FUNCT__
2915 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2916 /*@C
2917    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2918    (the default parallel PETSc format).
2919 
2920    Collective on MPI_Comm
2921 
2922    Input Parameters:
2923 +  B - the matrix
2924 .  bs - the block size
2925 .  i - the indices into j for the start of each local row (starts with zero)
2926 .  j - the column indices for each local row (starts with zero); these must be sorted within each row
2927 -  v - optional values in the matrix
2928 
2929    Level: developer
2930 
2931    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2932    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
2933    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
2934    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
2935    block and the second index is over columns within a block.
2936 
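   As an illustration (an illustrative sketch; B and the index values are placeholders), two local
   block rows with block size 2 and v = NULL (so no values are copied) could be preallocated as
.vb
   PetscInt bs   = 2;
   PetscInt ii[] = {0,2,3};   /* two local block rows with 2 and 1 block nonzeros */
   PetscInt jj[] = {0,1,1};   /* global block column indices, sorted within each row */
   ierr = MatMPIBAIJSetPreallocationCSR(B,bs,ii,jj,NULL);CHKERRQ(ierr);
.ve
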
2937 .keywords: matrix, block, aij, compressed row, sparse, parallel
2938 
2939 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ, MatCreateMPIBAIJWithArrays(), MPIBAIJ
2940 @*/
2941 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2942 {
2943   PetscErrorCode ierr;
2944 
2945   PetscFunctionBegin;
2946   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2947   PetscValidType(B,1);
2948   PetscValidLogicalCollectiveInt(B,bs,2);
2949   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2950   PetscFunctionReturn(0);
2951 }
2952 
2953 #undef __FUNCT__
2954 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2955 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2956 {
2957   Mat_MPIBAIJ    *b;
2958   PetscErrorCode ierr;
2959   PetscInt       i;
2960 
2961   PetscFunctionBegin;
2962   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2963   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2964   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2965   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2966 
2967   if (d_nnz) {
2968     for (i=0; i<B->rmap->n/bs; i++) {
2969       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
2970     }
2971   }
2972   if (o_nnz) {
2973     for (i=0; i<B->rmap->n/bs; i++) {
2974       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
2975     }
2976   }
2977 
2978   b      = (Mat_MPIBAIJ*)B->data;
2979   b->bs2 = bs*bs;
2980   b->mbs = B->rmap->n/bs;
2981   b->nbs = B->cmap->n/bs;
2982   b->Mbs = B->rmap->N/bs;
2983   b->Nbs = B->cmap->N/bs;
2984 
2985   for (i=0; i<=b->size; i++) {
2986     b->rangebs[i] = B->rmap->range[i]/bs;
2987   }
2988   b->rstartbs = B->rmap->rstart/bs;
2989   b->rendbs   = B->rmap->rend/bs;
2990   b->cstartbs = B->cmap->rstart/bs;
2991   b->cendbs   = B->cmap->rend/bs;
2992 
2993   if (!B->preallocated) {
2994     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2995     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
2996     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
2997     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
2998     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2999     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3000     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3001     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3002     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3003   }
3004 
3005   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3006   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3007   B->preallocated = PETSC_TRUE;
3008   PetscFunctionReturn(0);
3009 }
3010 
3011 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3012 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3013 
3014 #undef __FUNCT__
3015 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3016 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3017 {
3018   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3019   PetscErrorCode ierr;
3020   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3021   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3022   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3023 
3024   PetscFunctionBegin;
3025   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
3026   ii[0] = 0;
3027   for (i=0; i<M; i++) {
3028     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3029     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3030     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3031     /* remove one from the count if the row contains its diagonal entry */
3032     for (j=id[i]; j<id[i+1]; j++) {
3033       if (jd[j] == i) {ii[i+1]--;break;}
3034     }
3035   }
3036   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
3037   cnt  = 0;
3038   for (i=0; i<M; i++) {
3039     for (j=io[i]; j<io[i+1]; j++) {
3040       if (garray[jo[j]] > rstart) break;
3041       jj[cnt++] = garray[jo[j]];
3042     }
3043     for (k=id[i]; k<id[i+1]; k++) {
3044       if (jd[k] != i) {
3045         jj[cnt++] = rstart + jd[k];
3046       }
3047     }
3048     for (; j<io[i+1]; j++) {
3049       jj[cnt++] = garray[jo[j]];
3050     }
3051   }
3052   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3053   PetscFunctionReturn(0);
3054 }
3055 
3056 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3057 
3058 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3059 
3060 #undef __FUNCT__
3061 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3062 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3063 {
3064   PetscErrorCode ierr;
3065   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3066   Mat            B;
3067   Mat_MPIAIJ     *b;
3068 
3069   PetscFunctionBegin;
3070   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3071 
3072   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3073   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3074   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3075   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
3076   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3077   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3078   b    = (Mat_MPIAIJ*) B->data;
3079 
3080   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3081   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3082   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3083   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3084   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3085   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3086   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3087   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3088   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3089   if (reuse == MAT_INPLACE_MATRIX) {
3090     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3091   } else {
3092    *newmat = B;
3093   }
3094   PetscFunctionReturn(0);
3095 }
3096 
3097 /*MC
3098    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3099 
3100    Options Database Keys:
3101 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3102 . -mat_block_size <bs> - set the blocksize used to store the matrix
3103 - -mat_use_hash_table <fact> - use a hash table during matrix assembly; <fact> controls the size of the table
3104 
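  A typical usage sketch (comm and the sizes m, n, M, N, bs, d_nz, o_nz are placeholders):
.vb
   ierr = MatCreate(comm,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
   ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetPreallocation(A,bs,d_nz,NULL,o_nz,NULL);CHKERRQ(ierr);
.ve
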
3105   Level: beginner
3106 
3107 .seealso: MatCreateBAIJ(), MATSEQBAIJ, MATBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3108 M*/
3109 
3110 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3111 
3112 #undef __FUNCT__
3113 #define __FUNCT__ "MatCreate_MPIBAIJ"
3114 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3115 {
3116   Mat_MPIBAIJ    *b;
3117   PetscErrorCode ierr;
3118   PetscBool      flg = PETSC_FALSE;
3119 
3120   PetscFunctionBegin;
3121   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3122   B->data = (void*)b;
3123 
3124   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3125   B->assembled = PETSC_FALSE;
3126 
3127   B->insertmode = NOT_SET_VALUES;
3128   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3129   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3130 
3131   /* build local table of row and column ownerships */
3132   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3133 
3134   /* build cache for off array entries formed */
3135   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3136 
3137   b->donotstash  = PETSC_FALSE;
3138   b->colmap      = NULL;
3139   b->garray      = NULL;
3140   b->roworiented = PETSC_TRUE;
3141 
3142   /* stuff used in block assembly */
3143   b->barray = 0;
3144 
3145   /* stuff used for matrix vector multiply */
3146   b->lvec  = 0;
3147   b->Mvctx = 0;
3148 
3149   /* stuff for MatGetRow() */
3150   b->rowindices   = 0;
3151   b->rowvalues    = 0;
3152   b->getrowactive = PETSC_FALSE;
3153 
3154   /* hash table stuff */
3155   b->ht           = 0;
3156   b->hd           = 0;
3157   b->ht_size      = 0;
3158   b->ht_flag      = PETSC_FALSE;
3159   b->ht_fact      = 0;
3160   b->ht_total_ct  = 0;
3161   b->ht_insert_ct = 0;
3162 
3163   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3164   b->ijonly = PETSC_FALSE;
3165 
3166 
3167   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3168   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3169   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3170   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3171   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3172   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3173   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3174   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3175   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3176   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3177 
3178   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3179   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",flg,&flg,NULL);CHKERRQ(ierr);
3180   if (flg) {
3181     PetscReal fact = 1.39;
3182     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3183     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3184     if (fact <= 1.0) fact = 1.39;
3185     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3186     ierr = PetscInfo1(B,"Hash table factor used %5.2f\n",fact);CHKERRQ(ierr);
3187   }
3188   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3189   PetscFunctionReturn(0);
3190 }
3191 
3192 /*MC
3193    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3194 
3195    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3196    and MATMPIBAIJ otherwise.
3197 
3198    Options Database Keys:
3199 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3200 
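  For example, a code that creates its matrix with MatCreate() and MatSetFromOptions() can select
  this type at run time (./ex1 is a hypothetical executable name):
.vb
   mpiexec -n 4 ./ex1 -mat_type baij -mat_block_size 3
.ve
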
3201   Level: beginner
3202 
3203 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3204 M*/
3205 
3206 #undef __FUNCT__
3207 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3208 /*@C
3209    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3210    (block compressed row).  For good matrix assembly performance
3211    the user should preallocate the matrix storage by setting the parameters
3212    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3213    performance can be increased by more than a factor of 50.
3214 
3215    Collective on Mat
3216 
3217    Input Parameters:
3218 +  B - the matrix
3219 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3220           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3221 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3222            submatrix  (same for all local rows)
3223 .  d_nnz - array containing the number of block nonzeros in the various block rows
3224            of the diagonal portion of the local submatrix (possibly different for each block
3225            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3226            set it even if it is zero.
3227 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3228            submatrix (same for all local rows).
3229 -  o_nnz - array containing the number of block nonzeros in the various block rows of the
3230            off-diagonal portion of the local submatrix (possibly different for
3231            each block row) or NULL.
3232 
3233    If the *_nnz parameter is given then the *_nz parameter is ignored
3234 
3235    Options Database Keys:
3236 +   -mat_block_size - size of the blocks to use
3237 -   -mat_use_hash_table <fact> - use a hash table during matrix assembly; <fact> controls the size of the table
3238 
3239    Notes:
3240    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3241    then it must be used on all processors that share the object for that argument.
3242 
3243    Storage Information:
3244    For a square global matrix we define each processor's diagonal portion
3245    to be its local rows and the corresponding columns (a square submatrix);
3246    each processor's off-diagonal portion encompasses the remainder of the
3247    local matrix (a rectangular submatrix).
3248 
3249    The user can specify preallocated storage for the diagonal part of
3250    the local submatrix with either d_nz or d_nnz (not both).  Set
3251    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3252    memory allocation.  Likewise, specify preallocated storage for the
3253    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3254 
3255    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3256    the figure below we depict these three local rows and all columns (0-11).
3257 
3258 .vb
3259            0 1 2 3 4 5 6 7 8 9 10 11
3260           --------------------------
3261    row 3  |o o o d d d o o o o  o  o
3262    row 4  |o o o d d d o o o o  o  o
3263    row 5  |o o o d d d o o o o  o  o
3264           --------------------------
3265 .ve
3266 
3267    Thus, any entries in the d locations are stored in the d (diagonal)
3268    submatrix, and any entries in the o locations are stored in the
3269    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3270    stored simply in the MATSEQBAIJ format for compressed row storage.
3271 
3272    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3273    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3274    In general, for PDE problems in which most nonzeros are near the diagonal,
3275    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3276    or you will get TERRIBLE performance; see the users' manual chapter on
3277    matrices.
3278 
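   For instance, if every location shown in the figure were occupied and bs=1, each local row would
   have 3 block nonzeros in the diagonal portion and 9 in the off-diagonal portion, so (as an
   illustrative sketch) one could preallocate with either
.vb
   ierr = MatMPIBAIJSetPreallocation(B,1,3,NULL,9,NULL);CHKERRQ(ierr);
.ve
   or with d_nnz[] = {3,3,3} and o_nnz[] = {9,9,9} passed in place of the NULL arguments.
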
3279    You can call MatGetInfo() to get information on how effective the preallocation was;
3280    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3281    You can also run with the option -info and look for messages with the string
3282    malloc in them to see if additional memory allocation was needed.
3283 
3284    Level: intermediate
3285 
3286 .keywords: matrix, block, aij, compressed row, sparse, parallel
3287 
3288 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3289 @*/
3290 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3291 {
3292   PetscErrorCode ierr;
3293 
3294   PetscFunctionBegin;
3295   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3296   PetscValidType(B,1);
3297   PetscValidLogicalCollectiveInt(B,bs,2);
3298   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3299   PetscFunctionReturn(0);
3300 }
3301 
3302 #undef __FUNCT__
3303 #define __FUNCT__ "MatCreateBAIJ"
3304 /*@C
3305    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3306    (block compressed row).  For good matrix assembly performance
3307    the user should preallocate the matrix storage by setting the parameters
3308    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3309    performance can be increased by more than a factor of 50.
3310 
3311    Collective on MPI_Comm
3312 
3313    Input Parameters:
3314 +  comm - MPI communicator
3315 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3316           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3317 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3318            This value should be the same as the local size used in creating the
3319            y vector for the matrix-vector product y = Ax.
3320 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3321            This value should be the same as the local size used in creating the
3322            x vector for the matrix-vector product y = Ax.
3323 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3324 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3325 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3326            submatrix  (same for all local rows)
3327 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3328            of the diagonal portion of the local submatrix (possibly different for each block
3329            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3330            and set it even if it is zero.
3331 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3332            submatrix (same for all local rows).
3333 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3334            off-diagonal portion of the local submatrix (possibly different for
3335            each block row) or NULL.
3336 
3337    Output Parameter:
3338 .  A - the matrix
3339 
3340    Options Database Keys:
3341 +   -mat_block_size - size of the blocks to use
3342 -   -mat_use_hash_table <fact> - use a hash table during matrix assembly; <fact> controls the size of the table
3343 
3344    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3345    MatXXXXSetPreallocation() paradigm instead of calling this routine directly.
3346    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3347 
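   That recommended sequence looks like the following sketch (the communicator, sizes, and counts
   are placeholders); the preallocation call that does not match the type chosen at run time is
   simply ignored:
.vb
   ierr = MatCreate(comm,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
   ierr = MatSetType(A,MATBAIJ);CHKERRQ(ierr);
   ierr = MatSetFromOptions(A);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetPreallocation(A,bs,d_nz,NULL,o_nz,NULL);CHKERRQ(ierr);
   ierr = MatSeqBAIJSetPreallocation(A,bs,d_nz,NULL);CHKERRQ(ierr);
.ve
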
3348    Notes:
3349    If the *_nnz parameter is given then the *_nz parameter is ignored
3350 
3351    A nonzero block is any block that has 1 or more nonzeros in it
3352 
3353    The user MUST specify either the local or global matrix dimensions
3354    (possibly both).
3355 
3356    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3357    then it must be used on all processors that share the object for that argument.
3358 
3359    Storage Information:
3360    For a square global matrix we define each processor's diagonal portion
3361    to be its local rows and the corresponding columns (a square submatrix);
3362    each processor's off-diagonal portion encompasses the remainder of the
3363    local matrix (a rectangular submatrix).
3364 
3365    The user can specify preallocated storage for the diagonal part of
3366    the local submatrix with either d_nz or d_nnz (not both).  Set
3367    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3368    memory allocation.  Likewise, specify preallocated storage for the
3369    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3370 
3371    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3372    the figure below we depict these three local rows and all columns (0-11).
3373 
3374 .vb
3375            0 1 2 3 4 5 6 7 8 9 10 11
3376           --------------------------
3377    row 3  |o o o d d d o o o o  o  o
3378    row 4  |o o o d d d o o o o  o  o
3379    row 5  |o o o d d d o o o o  o  o
3380           --------------------------
3381 .ve
3382 
3383    Thus, any entries in the d locations are stored in the d (diagonal)
3384    submatrix, and any entries in the o locations are stored in the
3385    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3386    stored simply in the MATSEQBAIJ format for compressed row storage.
3387 
3388    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3389    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3390    In general, for PDE problems in which most nonzeros are near the diagonal,
3391    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3392    or you will get TERRIBLE performance; see the users' manual chapter on
3393    matrices.
3394 
3395    Level: intermediate
3396 
3397 .keywords: matrix, block, aij, compressed row, sparse, parallel
3398 
3399 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3400 @*/
3401 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3402 {
3403   PetscErrorCode ierr;
3404   PetscMPIInt    size;
3405 
3406   PetscFunctionBegin;
3407   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3408   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3409   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3410   if (size > 1) {
3411     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3412     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3413   } else {
3414     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3415     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3416   }
3417   PetscFunctionReturn(0);
3418 }
3419 
3420 #undef __FUNCT__
3421 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3422 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3423 {
3424   Mat            mat;
3425   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3426   PetscErrorCode ierr;
3427   PetscInt       len=0;
3428 
3429   PetscFunctionBegin;
3430   *newmat = 0;
3431   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3432   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3433   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3434   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3435 
3436   mat->factortype   = matin->factortype;
3437   mat->preallocated = PETSC_TRUE;
3438   mat->assembled    = PETSC_TRUE;
3439   mat->insertmode   = NOT_SET_VALUES;
3440 
3441   a             = (Mat_MPIBAIJ*)mat->data;
3442   mat->rmap->bs = matin->rmap->bs;
3443   a->bs2        = oldmat->bs2;
3444   a->mbs        = oldmat->mbs;
3445   a->nbs        = oldmat->nbs;
3446   a->Mbs        = oldmat->Mbs;
3447   a->Nbs        = oldmat->Nbs;
3448 
3449   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3450   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3451 
3452   a->size         = oldmat->size;
3453   a->rank         = oldmat->rank;
3454   a->donotstash   = oldmat->donotstash;
3455   a->roworiented  = oldmat->roworiented;
3456   a->rowindices   = 0;
3457   a->rowvalues    = 0;
3458   a->getrowactive = PETSC_FALSE;
3459   a->barray       = 0;
3460   a->rstartbs     = oldmat->rstartbs;
3461   a->rendbs       = oldmat->rendbs;
3462   a->cstartbs     = oldmat->cstartbs;
3463   a->cendbs       = oldmat->cendbs;
3464 
3465   /* hash table stuff */
3466   a->ht           = 0;
3467   a->hd           = 0;
3468   a->ht_size      = 0;
3469   a->ht_flag      = oldmat->ht_flag;
3470   a->ht_fact      = oldmat->ht_fact;
3471   a->ht_total_ct  = 0;
3472   a->ht_insert_ct = 0;
3473 
3474   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3475   if (oldmat->colmap) {
3476 #if defined(PETSC_USE_CTABLE)
3477     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3478 #else
3479     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3480     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3481     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3482 #endif
3483   } else a->colmap = 0;
3484 
3485   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3486     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3487     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3488     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3489   } else a->garray = 0;
3490 
3491   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3492   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3493   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3494   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3495   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3496 
3497   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3498   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3499   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3500   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3501   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3502   *newmat = mat;
3503   PetscFunctionReturn(0);
3504 }
3505 
3506 #undef __FUNCT__
3507 #define __FUNCT__ "MatLoad_MPIBAIJ"
3508 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3509 {
3510   PetscErrorCode ierr;
3511   int            fd;
3512   PetscInt       i,nz,j,rstart,rend;
3513   PetscScalar    *vals,*buf;
3514   MPI_Comm       comm;
3515   MPI_Status     status;
3516   PetscMPIInt    rank,size,maxnz;
3517   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3518   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3519   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3520   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3521   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3522   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3523 
3524   PetscFunctionBegin;
3525   /* force binary viewer to load .info file if it has not yet done so */
3526   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3527   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3528   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3529   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3530   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3531   if (bs < 0) bs = 1;
3532 
3533   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3534   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3535   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3536   if (!rank) {
3537     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3538     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Not a matrix object");
3539     if (header[3] < 0) SETERRQ(PetscObjectComm((PetscObject)newmat),PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format on disk, cannot load as MPIBAIJ");
3540   }
3541   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3542   M    = header[1]; N = header[2];
3543 
3544   /* If global sizes are set, check if they are consistent with that given in the file */
3545   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows:Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3546   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols:Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3547 
3548   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3549 
3550   /*
3551      This code adds extra rows to make sure the number of rows is
3552      divisible by the blocksize
3553   */
3554   Mbs        = M/bs;
3555   extra_rows = bs - M + bs*Mbs;
3556   if (extra_rows == bs) extra_rows = 0;
3557   else                  Mbs++;
3558   if (extra_rows && !rank) {
3559     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3560   }
3561 
3562   /* determine ownership of all rows */
3563   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3564     mbs = Mbs/size + ((Mbs % size) > rank);
3565     m   = mbs*bs;
3566   } else { /* User set */
3567     m   = newmat->rmap->n;
3568     mbs = m/bs;
3569   }
3570   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3571   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3572 
3573   /* process 0 needs enough room for process with most rows */
3574   if (!rank) {
3575     mmax = rowners[1];
3576     for (i=2; i<=size; i++) {
3577       mmax = PetscMax(mmax,rowners[i]);
3578     }
3579     mmax*=bs;
3580   } else mmax = -1;             /* unused, but compiler warns anyway */
3581 
3582   rowners[0] = 0;
3583   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3584   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3585   rstart = rowners[rank];
3586   rend   = rowners[rank+1];
3587 
3588   /* distribute row lengths to all processors */
3589   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3590   if (!rank) {
3591     mend = m;
3592     if (size == 1) mend = mend - extra_rows;
3593     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3594     for (j=mend; j<m; j++) locrowlens[j] = 1;
3595     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3596     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3597     for (j=0; j<m; j++) {
3598       procsnz[0] += locrowlens[j];
3599     }
3600     for (i=1; i<size; i++) {
3601       mend = browners[i+1] - browners[i];
3602       if (i == size-1) mend = mend - extra_rows;
3603       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3604       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3605       /* calculate the number of nonzeros on each processor */
3606       for (j=0; j<browners[i+1]-browners[i]; j++) {
3607         procsnz[i] += rowlengths[j];
3608       }
3609       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3610     }
3611     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3612   } else {
3613     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3614   }
3615 
3616   if (!rank) {
3617     /* determine max buffer needed and allocate it */
3618     maxnz = procsnz[0];
3619     for (i=1; i<size; i++) {
3620       maxnz = PetscMax(maxnz,procsnz[i]);
3621     }
3622     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3623 
3624     /* read in my part of the matrix column indices  */
3625     nz     = procsnz[0];
3626     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3627     mycols = ibuf;
3628     if (size == 1) nz -= extra_rows;
3629     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3630     if (size == 1) {
3631       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3632     }
3633 
3634     /* read in the column indices for every other process (except the last) and ship them off */
3635     for (i=1; i<size-1; i++) {
3636       nz   = procsnz[i];
3637       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3638       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3639     }
3640     /* read in the stuff for the last proc */
3641     if (size != 1) {
3642       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3643       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3644       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3645       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3646     }
3647     ierr = PetscFree(cols);CHKERRQ(ierr);
3648   } else {
3649     /* determine buffer space needed for message */
3650     nz = 0;
3651     for (i=0; i<m; i++) {
3652       nz += locrowlens[i];
3653     }
3654     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3655     mycols = ibuf;
3656     /* receive the message of column indices */
3657     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3658     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3659     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3660   }
3661 
3662   /* loop over local rows, determining number of off diagonal entries */
3663   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3664   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3665   rowcount = 0; nzcount = 0;
3666   for (i=0; i<mbs; i++) {
3667     dcount  = 0;
3668     odcount = 0;
3669     for (j=0; j<bs; j++) {
3670       kmax = locrowlens[rowcount];
3671       for (k=0; k<kmax; k++) {
3672         tmp = mycols[nzcount++]/bs;
3673         if (!mask[tmp]) {
3674           mask[tmp] = 1;
3675           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3676           else masked1[dcount++] = tmp;
3677         }
3678       }
3679       rowcount++;
3680     }
3681 
3682     dlens[i]  = dcount;
3683     odlens[i] = odcount;
3684 
3685     /* zero out the mask elements we set */
3686     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3687     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3688   }
3689 
3690   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3691   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3692 
3693   if (!rank) {
3694     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3695     /* read in my part of the matrix numerical values  */
3696     nz     = procsnz[0];
3697     vals   = buf;
3698     mycols = ibuf;
3699     if (size == 1) nz -= extra_rows;
3700     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3701     if (size == 1) {
3702       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3703     }
3704 
3705     /* insert into matrix */
3706     jj = rstart*bs;
3707     for (i=0; i<m; i++) {
3708       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3709       mycols += locrowlens[i];
3710       vals   += locrowlens[i];
3711       jj++;
3712     }
3713     /* read in other processors (except the last one) and ship out */
3714     for (i=1; i<size-1; i++) {
3715       nz   = procsnz[i];
3716       vals = buf;
3717       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3718       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3719     }
3720     /* the last proc */
3721     if (size != 1) {
3722       nz   = procsnz[i] - extra_rows;
3723       vals = buf;
3724       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3725       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3726       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3727     }
3728     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3729   } else {
3730     /* receive numeric values */
3731     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3732 
3733     /* receive the message of values */
3734     vals   = buf;
3735     mycols = ibuf;
3736     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3737 
3738     /* insert into matrix */
3739     jj = rstart*bs;
3740     for (i=0; i<m; i++) {
3741       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3742       mycols += locrowlens[i];
3743       vals   += locrowlens[i];
3744       jj++;
3745     }
3746   }
3747   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3748   ierr = PetscFree(buf);CHKERRQ(ierr);
3749   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3750   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3751   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3752   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3753   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3754   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3755   PetscFunctionReturn(0);
3756 }
3757 
3758 #undef __FUNCT__
3759 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3760 /*@
3761    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3762 
3763    Input Parameters:
3764 +  mat  - the matrix
3765 -  fact - factor
3766 
3767    Not Collective, each process can use a different factor
3768 
3769    Level: advanced
3770 
3771   Notes:
3772    This can also be set by the command line option: -mat_use_hash_table <fact>
3773 
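   A sketch of the equivalent calls in code (1.39 is the default factor applied by the
   -mat_use_hash_table option when no valid factor is given):
.vb
   ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetHashTableFactor(A,1.39);CHKERRQ(ierr);
.ve
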
3774 .keywords: matrix, hashtable, factor, HT
3775 
3776 .seealso: MatSetOption()
3777 @*/
3778 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3779 {
3780   PetscErrorCode ierr;
3781 
3782   PetscFunctionBegin;
3783   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3784   PetscFunctionReturn(0);
3785 }
3786 
3787 #undef __FUNCT__
3788 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3789 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3790 {
3791   Mat_MPIBAIJ *baij;
3792 
3793   PetscFunctionBegin;
3794   baij          = (Mat_MPIBAIJ*)mat->data;
3795   baij->ht_fact = fact;
3796   PetscFunctionReturn(0);
3797 }
3798 
3799 #undef __FUNCT__
3800 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
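/* Returns the local diagonal block Ad, the local off-diagonal block Ao, and the map colmap from
   local to global block column numbers of Ao; any output argument may be NULL if not needed */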
3801 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3802 {
3803   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3804 
3805   PetscFunctionBegin;
3806   if (Ad)     *Ad     = a->A;
3807   if (Ao)     *Ao     = a->B;
3808   if (colmap) *colmap = a->garray;
3809   PetscFunctionReturn(0);
3810 }
3811 
3812 /*
3813     Special version for direct calls from Fortran (to eliminate two function call overheads)
3814 */
3815 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3816 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3817 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3818 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3819 #endif
3820 
3821 #undef __FUNCT__
3822 #define __FUNCT__ "matmpibaijsetvaluesblocked"
3823 /*@C
3824   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3825 
3826   Collective on Mat
3827 
3828   Input Parameters:
3829 + mat - the matrix
3830 . min - number of input rows
3831 . im - input rows
3832 . nin - number of input columns
3833 . in - input columns
3834 . v - numerical values input
3835 - addvin - INSERT_VALUES or ADD_VALUES
3836 
3837   Notes: This contains a complete copy of the body of MatSetValuesBlocked_MPIBAIJ(); the duplication is unfortunate but avoids the call overhead.
3838 
3839   Level: advanced
3840 
3841 .seealso:   MatSetValuesBlocked()
3842 @*/
3843 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3844 {
3845   /* convert input arguments to C version */
3846   Mat        mat  = *matin;
3847   PetscInt   m    = *min, n = *nin;
3848   InsertMode addv = *addvin;
3849 
3850   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3851   const MatScalar *value;
3852   MatScalar       *barray     = baij->barray;
3853   PetscBool       roworiented = baij->roworiented;
3854   PetscErrorCode  ierr;
3855   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3856   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3857   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3858 
3859   PetscFunctionBegin;
3860   /* tasks normally handled by MatSetValuesBlocked() */
3861   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3862 #if defined(PETSC_USE_DEBUG)
3863   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3864   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3865 #endif
3866   if (mat->assembled) {
3867     mat->was_assembled = PETSC_TRUE;
3868     mat->assembled     = PETSC_FALSE;
3869   }
3870   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3871 
3872 
3873   if (!barray) {
3874     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3875     baij->barray = barray;
3876   }
3877 
3878   if (roworiented) stepval = (n-1)*bs;
3879   else stepval = (m-1)*bs;
3880 
3881   for (i=0; i<m; i++) {
3882     if (im[i] < 0) continue;
3883 #if defined(PETSC_USE_DEBUG)
3884     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3885 #endif
3886     if (im[i] >= rstart && im[i] < rend) {
3887       row = im[i] - rstart;
3888       for (j=0; j<n; j++) {
3889         /* If only one block column (row-oriented) or one block row (column-oriented) is input, a copy is not required */
3890         if ((roworiented) && (n == 1)) {
3891           barray = (MatScalar*)v + i*bs2;
3892         } else if ((!roworiented) && (m == 1)) {
3893           barray = (MatScalar*)v + j*bs2;
3894         } else { /* Here a copy is required */
3895           if (roworiented) {
3896             value = v + i*(stepval+bs)*bs + j*bs;
3897           } else {
3898             value = v + j*(stepval+bs)*bs + i*bs;
3899           }
3900           for (ii=0; ii<bs; ii++,value+=stepval) {
3901             for (jj=0; jj<bs; jj++) {
3902               *barray++ = *value++;
3903             }
3904           }
3905           barray -=bs2;
3906         }
3907 
3908         if (in[j] >= cstart && in[j] < cend) {
3909           col  = in[j] - cstart;
3910           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3911         } else if (in[j] < 0) continue;
3912 #if defined(PETSC_USE_DEBUG)
3913         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3914 #endif
3915         else {
3916           if (mat->was_assembled) {
3917             if (!baij->colmap) {
3918               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3919             }
3920 
3921 #if defined(PETSC_USE_DEBUG)
3922 #if defined(PETSC_USE_CTABLE)
3923             { PetscInt data;
3924               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3925               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3926             }
3927 #else
3928             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3929 #endif
3930 #endif
3931 #if defined(PETSC_USE_CTABLE)
3932             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3933             col  = (col - 1)/bs;
3934 #else
3935             col = (baij->colmap[in[j]] - 1)/bs;
3936 #endif
3937             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3938               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3939               col  = in[j];
3940             }
3941           } else col = in[j];
3942           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3943         }
3944       }
3945     } else {
3946       if (!baij->donotstash) {
3947         if (roworiented) {
3948           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3949         } else {
3950           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3951         }
3952       }
3953     }
3954   }
3955 
3956   /* task normally handled by MatSetValuesBlocked() */
3957   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3958   PetscFunctionReturn(0);
3959 }
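
/*
   For reference, a minimal sketch (illustrative only; the surrounding program is
   omitted) of the equivalent insertion from C, assuming A is an already created
   MPIBAIJ matrix with block size 2; MatSetValuesBlocked() reaches the same insertion
   path as the stub above:

      PetscScalar block[4] = {1.0,2.0,3.0,4.0};   row-oriented 2x2 block
      PetscInt    brow = 0,bcol = 0;              block row and column indices
      ierr = MatSetValuesBlocked(A,1,&brow,1,&bcol,block,INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/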
3960 
3961 #undef __FUNCT__
3962 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3963 /*@
3964      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local
3965          rows in standard block compressed sparse row (CSR) format.
3966 
3967    Collective on MPI_Comm
3968 
3969    Input Parameters:
3970 +  comm - MPI communicator
3971 .  bs - the block size, only a block size of 1 is supported
3972 .  m - number of local rows (cannot be PETSC_DECIDE)
3973 .  n - number of local columns; this should be the same as the local size used in creating the
3974        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
3975        calculated if N is given). For square matrices n is almost always m.
3976 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3977 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3978 .  i - row indices; i[0] must be 0 and i[row] gives the offset in j and a of the first entry of row
3979 .  j - column indices
3980 -  a - matrix values
3981 
3982    Output Parameter:
3983 .   mat - the matrix
3984 
3985    Level: intermediate
3986 
3987    Notes:
3988        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
3989      thus you CANNOT change the matrix entries by changing the values of a[] after you have
3990      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
3991 
3992      The order of the entries in a is that of the block compressed sparse row storage format; that is, it is
3993      the same as a three-dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
3994      block, followed by the second column of the first block, and so on.  That is, the blocks are contiguous in memory
3995      with column-major ordering within each block.
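
     For example (an illustrative layout): with bs=2 and a block row whose two blocks are
     [1 2; 3 4] and [5 6; 7 8] in row-major notation, the corresponding slice of a[] is
     {1,3,2,4,5,7,6,8}; each block is stored column by column, and the blocks follow each other.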
3996 
3997        The i and j indices are 0 based, and the entries of i are offsets into the local j (and a) arrays.
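
       A minimal sketch of usage (illustrative only; error checking omitted), with bs = 1
     and two MPI processes, each contributing one row of the 2x2 identity:

.vb
      PetscMPIInt rank;
      PetscInt    ia[2] = {0,1},ja[1];
      PetscScalar va[1] = {1.0};
      Mat         A;

      MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
      ja[0] = rank;
      MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,1,1,2,2,ia,ja,va,&A);
.ve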
3998 
3999 .keywords: matrix, baij, block compressed row, sparse, parallel
4000 
4001 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR(),
4002           MATMPIBAIJ, MatCreateBAIJ(), MatCreateMPIAIJWithSplitArrays()
4003 @*/
4004 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4005 {
4006   PetscErrorCode ierr;
4007 
4008   PetscFunctionBegin;
4009   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4010   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE or negative");
4011   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4012   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4013   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
4014   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
4015   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4016   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
4017   PetscFunctionReturn(0);
4018 }
4019 
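/*
    Concatenates the sequential BAIJ matrices supplied by the processes of comm (one per
  process) into a single parallel MATMPIBAIJ matrix whose local rows are the rows of
  inmat; n is the number of local block columns of the result (or PETSC_DECIDE). For
  MAT_INITIAL_MATRIX a symbolic phase first derives the preallocation from the block
  sparsity pattern of inmat, and the numeric phase then copies the values row by row.
*/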
4020 #undef __FUNCT__
4021 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPIBAIJ"
4022 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
4023 {
4024   PetscErrorCode ierr;
4025   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
4026   PetscInt       *indx;
4027   PetscScalar    *values;
4028 
4029   PetscFunctionBegin;
4030   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
4031   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
4032     Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
4033     PetscInt       *dnz,*onz,sum,mbs,Nbs;
4034     PetscInt       *bindx,rmax=a->rmax,j;
4035 
4036     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4037     mbs = m/bs; Nbs = N/cbs;
4038     if (n == PETSC_DECIDE) {
4039       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
4040     }
4041     /* Check sum(n) = Nbs */
4042     ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4043     if (sum != Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns %D != global block columns %D",sum,Nbs);
4044 
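4044     /* the inclusive prefix sum of mbs over the ranks, minus this rank's own mbs,
4044        gives the global index of the first block row owned by this process */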
4045     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4046     rstart -= mbs;
4047 
4048     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
4049     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
4050     for (i=0; i<mbs; i++) {
4051       ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
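4051       /* convert the scalar-level count and column indices just obtained into block
4051          counts and block column indices for the preallocation bookkeeping below */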
4052       nnz = nnz/bs;
4053       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
4054       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
4055       ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
4056     }
4057     ierr = PetscFree(bindx);CHKERRQ(ierr);
4058 
4059     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
4060     ierr = MatSetSizes(*outmat,m,n*cbs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4061     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
4062     ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
4063     ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
4064     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
4065   }
4066 
4067   /* numeric phase */
4068   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4069   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
4070 
4071   for (i=0; i<m; i++) {
4072     ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4073     Ii   = i + rstart;
4074     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
4075     ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4076   }
4077   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4078   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4079   PetscFunctionReturn(0);
4080 }
4081