xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision bcaeba4d41d6ca6c6dc4189db20683073a9959ce)
1 
2 #include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/
3 #include <petscblaslapack.h>
4 
5 extern PetscErrorCode MatSetUpMultiply_MPIBAIJ(Mat);
6 extern PetscErrorCode MatDisAssemble_MPIBAIJ(Mat);
7 extern PetscErrorCode MatGetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],PetscScalar []);
8 extern PetscErrorCode MatSetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
9 extern PetscErrorCode MatSetValuesBlocked_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
10 extern PetscErrorCode MatGetRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
11 extern PetscErrorCode MatRestoreRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
12 extern PetscErrorCode MatZeroRows_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscScalar,Vec,Vec);
13 
14 #undef __FUNCT__
15 #define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
16 PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
17 {
18   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
19   PetscErrorCode ierr;
20   PetscInt       i,*idxb = 0;
21   PetscScalar    *va,*vb;
22   Vec            vtmp;
23 
24   PetscFunctionBegin;
25   ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
26   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
27   if (idx) {
28     for (i=0; i<A->rmap->n; i++) {if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;}
29   }
30 
31   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
32   if (idx) {ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);}
33   ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
34   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
35 
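  /* keep the off-diagonal entry when it is larger in magnitude; idxb[] holds the local point
     column in B, mapped back to a global point column via garray[] (block column) plus the
     offset within the bs x bs block */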
36   for (i=0; i<A->rmap->n; i++){
37     if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {va[i] = vb[i]; if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);}
38   }
39 
40   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
41   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
42   ierr = PetscFree(idxb);CHKERRQ(ierr);
43   ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
44   PetscFunctionReturn(0);
45 }
46 
47 EXTERN_C_BEGIN
48 #undef __FUNCT__
49 #define __FUNCT__ "MatStoreValues_MPIBAIJ"
50 PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
51 {
52   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ *)mat->data;
53   PetscErrorCode ierr;
54 
55   PetscFunctionBegin;
56   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
57   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
58   PetscFunctionReturn(0);
59 }
60 EXTERN_C_END
61 
62 EXTERN_C_BEGIN
63 #undef __FUNCT__
64 #define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
65 PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
66 {
67   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ *)mat->data;
68   PetscErrorCode ierr;
69 
70   PetscFunctionBegin;
71   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
72   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
73   PetscFunctionReturn(0);
74 }
75 EXTERN_C_END
76 
77 /*
78      Local utility routine that creates a mapping from the global column
79    number to the local number in the off-diagonal part of the local
80    storage of the matrix.  This is done in a non-scalable way since the
81    length of colmap equals the number of block columns of the global matrix.
82 */
83 #undef __FUNCT__
84 #define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
85 PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
86 {
87   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
88   Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)baij->B->data;
89   PetscErrorCode ierr;
90   PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;
91 
92   PetscFunctionBegin;
93 #if defined (PETSC_USE_CTABLE)
94   ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
95   for (i=0; i<nbs; i++){
96     ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
97   }
98 #else
99   ierr = PetscMalloc((baij->Nbs+1)*sizeof(PetscInt),&baij->colmap);CHKERRQ(ierr);
100   ierr = PetscLogObjectMemory(mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
101   ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
102   for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
103 #endif
104   PetscFunctionReturn(0);
105 }
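/*
   A small illustration (hypothetical values, not part of the source): with bs = 2 and
   garray = {3,7} (the global block columns present in the off-diagonal part B), the loop
   above gives

       colmap[3] = 0*bs + 1 = 1,   colmap[7] = 1*bs + 1 = 3,

   i.e. colmap[g] stores 1 + the first local point column of global block column g, and a
   value of 0 means that block column g does not occur in B.  With PETSC_USE_CTABLE the
   same values are stored in a PetscTable keyed by g+1 instead of in a dense array.
*/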
106 
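/*
   The two macros below inline the insertion of a single point value into the diagonal
   block A (MatSetValues_SeqBAIJ_A_Private) and into the off-diagonal block B
   (MatSetValues_SeqBAIJ_B_Private); they mirror MatSetValues_SeqBAIJ() (see the
   commented-out calls in MatSetValues_MPIBAIJ()) while avoiding the call overhead.
   Here row and col are local point indices, split into a block row/column plus the
   offsets (ridx,cidx) inside the bs x bs block.
*/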
107 #define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv) \
108 { \
109  \
110     brow = row/bs;  \
111     rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
112     rmax = aimax[brow]; nrow = ailen[brow]; \
113       bcol = col/bs; \
114       ridx = row % bs; cidx = col % bs; \
115       low = 0; high = nrow; \
116       while (high-low > 3) { \
117         t = (low+high)/2; \
118         if (rp[t] > bcol) high = t; \
119         else              low  = t; \
120       } \
121       for (_i=low; _i<high; _i++) { \
122         if (rp[_i] > bcol) break; \
123         if (rp[_i] == bcol) { \
124           bap  = ap +  bs2*_i + bs*cidx + ridx; \
125           if (addv == ADD_VALUES) *bap += value;  \
126           else                    *bap  = value;  \
127           goto a_noinsert; \
128         } \
129       } \
130       if (a->nonew == 1) goto a_noinsert; \
131       if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
132       MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
133       N = nrow++ - 1;  \
134       /* shift up all the later entries in this row */ \
135       for (ii=N; ii>=_i; ii--) { \
136         rp[ii+1] = rp[ii]; \
137         ierr = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
138       } \
139       if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
140       rp[_i]                      = bcol;  \
141       ap[bs2*_i + bs*cidx + ridx] = value;  \
142       a_noinsert:; \
143     ailen[brow] = nrow; \
144 }
145 
146 #define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv) \
147 { \
148     brow = row/bs;  \
149     rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
150     rmax = bimax[brow]; nrow = bilen[brow]; \
151       bcol = col/bs; \
152       ridx = row % bs; cidx = col % bs; \
153       low = 0; high = nrow; \
154       while (high-low > 3) { \
155         t = (low+high)/2; \
156         if (rp[t] > bcol) high = t; \
157         else              low  = t; \
158       } \
159       for (_i=low; _i<high; _i++) { \
160         if (rp[_i] > bcol) break; \
161         if (rp[_i] == bcol) { \
162           bap  = ap +  bs2*_i + bs*cidx + ridx; \
163           if (addv == ADD_VALUES) *bap += value;  \
164           else                    *bap  = value;  \
165           goto b_noinsert; \
166         } \
167       } \
168       if (b->nonew == 1) goto b_noinsert; \
169       if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
170       MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
171       CHKMEMQ;\
172       N = nrow++ - 1;  \
173       /* shift up all the later entries in this row */ \
174       for (ii=N; ii>=_i; ii--) { \
175         rp[ii+1] = rp[ii]; \
176         ierr = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
177       } \
178       if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
179       rp[_i]                      = bcol;  \
180       ap[bs2*_i + bs*cidx + ridx] = value;  \
181       b_noinsert:; \
182     bilen[brow] = nrow; \
183 }
184 
185 #undef __FUNCT__
186 #define __FUNCT__ "MatSetValues_MPIBAIJ"
187 PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
188 {
189   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
190   MatScalar      value;
191   PetscBool      roworiented = baij->roworiented;
192   PetscErrorCode ierr;
193   PetscInt       i,j,row,col;
194   PetscInt       rstart_orig=mat->rmap->rstart;
195   PetscInt       rend_orig=mat->rmap->rend,cstart_orig=mat->cmap->rstart;
196   PetscInt       cend_orig=mat->cmap->rend,bs=mat->rmap->bs;
197 
198   /* Some variables required by the macros below */
199   Mat            A = baij->A;
200   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)(A)->data;
201   PetscInt       *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
202   MatScalar      *aa=a->a;
203 
204   Mat            B = baij->B;
205   Mat_SeqBAIJ    *b = (Mat_SeqBAIJ*)(B)->data;
206   PetscInt       *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
207   MatScalar      *ba=b->a;
208 
209   PetscInt       *rp,ii,nrow,_i,rmax,N,brow,bcol;
210   PetscInt       low,high,t,ridx,cidx,bs2=a->bs2;
211   MatScalar      *ap,*bap;
212 
213   PetscFunctionBegin;
214   if (v) PetscValidScalarPointer(v,6);
215   for (i=0; i<m; i++) {
216     if (im[i] < 0) continue;
217 #if defined(PETSC_USE_DEBUG)
218     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
219 #endif
220     if (im[i] >= rstart_orig && im[i] < rend_orig) {
221       row = im[i] - rstart_orig;
222       for (j=0; j<n; j++) {
223         if (in[j] >= cstart_orig && in[j] < cend_orig){
224           col = in[j] - cstart_orig;
225           if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
226           MatSetValues_SeqBAIJ_A_Private(row,col,value,addv);
227           /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
228         } else if (in[j] < 0) continue;
229 #if defined(PETSC_USE_DEBUG)
230         else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
231 #endif
232         else {
233           if (mat->was_assembled) {
234             if (!baij->colmap) {
235               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
236             }
237 #if defined (PETSC_USE_CTABLE)
238             ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
239             col  = col - 1;
240 #else
241             col = baij->colmap[in[j]/bs] - 1;
242 #endif
243             if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
244               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
245               col =  in[j];
246               /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
247               B = baij->B;
248               b = (Mat_SeqBAIJ*)(B)->data;
249               bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
250               ba=b->a;
251             } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
252             else col += in[j]%bs;
253           } else col = in[j];
254           if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
255           MatSetValues_SeqBAIJ_B_Private(row,col,value,addv);
256           /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
257         }
258       }
259     } else {
260       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
261       if (!baij->donotstash) {
262         mat->assembled = PETSC_FALSE;
263         if (roworiented) {
264           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
265         } else {
266           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
267         }
268       }
269     }
270   }
271   PetscFunctionReturn(0);
272 }
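/*
   Illustrative sketch (not compiled) of how this routine is reached through the public
   interface; entries destined for off-process rows are stashed above and communicated
   during assembly:

       PetscInt    row = ..., col = ...;    (global point indices)
       PetscScalar val = ...;
       ierr = MatSetValues(mat,1,&row,1,&col,&val,ADD_VALUES);CHKERRQ(ierr);
       ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
       ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/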
273 
274 #undef __FUNCT__
275 #define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
276 PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
277 {
278   Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
279   const PetscScalar *value;
280   MatScalar         *barray=baij->barray;
281   PetscBool         roworiented = baij->roworiented;
282   PetscErrorCode    ierr;
283   PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
284   PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
285   PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
286 
287   PetscFunctionBegin;
288   if (!barray) {
289     ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
290     baij->barray = barray;
291   }
292 
293   if (roworiented) {
294     stepval = (n-1)*bs;
295   } else {
296     stepval = (m-1)*bs;
297   }
298   for (i=0; i<m; i++) {
299     if (im[i] < 0) continue;
300 #if defined(PETSC_USE_DEBUG)
301     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
302 #endif
303     if (im[i] >= rstart && im[i] < rend) {
304       row = im[i] - rstart;
305       for (j=0; j<n; j++) {
306         /* A copy is not required when only one block column (row-oriented) or one block row (column-oriented) is passed */
307         if ((roworiented) && (n == 1)) {
308           barray = (MatScalar*)v + i*bs2;
309         } else if ((!roworiented) && (m == 1)) {
310           barray = (MatScalar*)v + j*bs2;
311         } else { /* Here a copy is required */
312           if (roworiented) {
313             value = v + (i*(stepval+bs) + j)*bs;
314           } else {
315 	    value = v + (j*(stepval+bs) + i)*bs;
316           }
317           for (ii=0; ii<bs; ii++,value+=bs+stepval) {
318             for (jj=0; jj<bs; jj++) {
319               barray[jj]  = value[jj];
320             }
321             barray += bs;
322           }
323           barray -= bs2;
324         }
325 
326         if (in[j] >= cstart && in[j] < cend){
327           col  = in[j] - cstart;
328           ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
329         }
330         else if (in[j] < 0) continue;
331 #if defined(PETSC_USE_DEBUG)
332         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
333 #endif
334         else {
335           if (mat->was_assembled) {
336             if (!baij->colmap) {
337               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
338             }
339 
340 #if defined(PETSC_USE_DEBUG)
341 #if defined (PETSC_USE_CTABLE)
342             { PetscInt data;
343               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
344               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
345             }
346 #else
347             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
348 #endif
349 #endif
350 #if defined (PETSC_USE_CTABLE)
351 	    ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
352             col  = (col - 1)/bs;
353 #else
354             col = (baij->colmap[in[j]] - 1)/bs;
355 #endif
356             if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
357               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
358               col =  in[j];
359             } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", bs*im[i], bs*in[j]);
360           }
361           else col = in[j];
362           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
363         }
364       }
365     } else {
366       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
367       if (!baij->donotstash) {
368         if (roworiented) {
369           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
370         } else {
371           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
372         }
373       }
374     }
375   }
376   PetscFunctionReturn(0);
377 }
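/*
   Illustrative sketch (not compiled): with MatSetValuesBlocked() the indices im[] and in[]
   are BLOCK indices and v holds m*n dense bs x bs blocks (row-oriented unless
   MAT_ROW_ORIENTED has been turned off), e.g. for a single block

       PetscInt    brow = ..., bcol = ...;   (global block indices)
       PetscScalar vblock[4];                (bs = 2, so bs*bs = 4 values)
       ierr = MatSetValuesBlocked(mat,1,&brow,1,&bcol,vblock,INSERT_VALUES);CHKERRQ(ierr);
*/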
378 
379 #define HASH_KEY 0.6180339887
380 #define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
381 /* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
382 /* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
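/*
   Worked example of the multiplicative hash above (illustrative numbers): for size = 100
   and key = 7, tmp = 7*0.6180339887 = 4.326...; its fractional part is 0.326..., so
   HASH(100,7,tmp) = (PetscInt)(100*0.326...) = 32.  Collisions are resolved by the linear
   probing performed in the routines below.
*/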
383 #undef __FUNCT__
384 #define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
385 PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
386 {
387   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
388   PetscBool      roworiented = baij->roworiented;
389   PetscErrorCode ierr;
390   PetscInt       i,j,row,col;
391   PetscInt       rstart_orig=mat->rmap->rstart;
392   PetscInt       rend_orig=mat->rmap->rend,Nbs=baij->Nbs;
393   PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
394   PetscReal      tmp;
395   MatScalar      **HD = baij->hd,value;
396 #if defined(PETSC_USE_DEBUG)
397   PetscInt       total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
398 #endif
399 
400   PetscFunctionBegin;
401   if (v) PetscValidScalarPointer(v,6);
402   for (i=0; i<m; i++) {
403 #if defined(PETSC_USE_DEBUG)
404     if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
405     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
406 #endif
407     row = im[i];
408     if (row >= rstart_orig && row < rend_orig) {
409       for (j=0; j<n; j++) {
410         col = in[j];
411         if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
412         /* Look up into the Hash Table */
413         key = (row/bs)*Nbs+(col/bs)+1;
414         h1  = HASH(size,key,tmp);
415 
416 
417         idx = h1;
418 #if defined(PETSC_USE_DEBUG)
419         insert_ct++;
420         total_ct++;
421         if (HT[idx] != key) {
422           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++);
423           if (idx == size) {
424             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++);
425             if (idx == h1) {
426               SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
427             }
428           }
429         }
430 #else
431         if (HT[idx] != key) {
432           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++);
433           if (idx == size) {
434             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++);
435             if (idx == h1) {
436               SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
437             }
438           }
439         }
440 #endif
441         /* A HASH table entry is found, so insert the values at the correct address */
442         if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
443         else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
444       }
445     } else {
446       if (!baij->donotstash) {
447         if (roworiented) {
448           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
449         } else {
450           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
451         }
452       }
453     }
454   }
455 #if defined(PETSC_USE_DEBUG)
456   baij->ht_total_ct = total_ct;
457   baij->ht_insert_ct = insert_ct;
458 #endif
459   PetscFunctionReturn(0);
460 }
461 
462 #undef __FUNCT__
463 #define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
464 PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
465 {
466   Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
467   PetscBool         roworiented = baij->roworiented;
468   PetscErrorCode    ierr;
469   PetscInt          i,j,ii,jj,row,col;
470   PetscInt          rstart=baij->rstartbs;
471   PetscInt          rend=mat->rmap->rend,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
472   PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
473   PetscReal         tmp;
474   MatScalar         **HD = baij->hd,*baij_a;
475   const PetscScalar *v_t,*value;
476 #if defined(PETSC_USE_DEBUG)
477   PetscInt          total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
478 #endif
479 
480   PetscFunctionBegin;
481 
482   if (roworiented) {
483     stepval = (n-1)*bs;
484   } else {
485     stepval = (m-1)*bs;
486   }
487   for (i=0; i<m; i++) {
488 #if defined(PETSC_USE_DEBUG)
489     if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
490     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
491 #endif
492     row   = im[i];
493     v_t   = v + i*nbs2;
494     if (row >= rstart && row < rend) {
495       for (j=0; j<n; j++) {
496         col = in[j];
497 
498         /* Look up into the Hash Table */
499         key = row*Nbs+col+1;
500         h1  = HASH(size,key,tmp);
501 
502         idx = h1;
503 #if defined(PETSC_USE_DEBUG)
504         total_ct++;
505         insert_ct++;
506         if (HT[idx] != key) {
507           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++);
508           if (idx == size) {
509             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++);
510             if (idx == h1) {
511               SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
512             }
513           }
514         }
515 #else
516         if (HT[idx] != key) {
517           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++);
518           if (idx == size) {
519             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++);
520             if (idx == h1) {
521               SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
522             }
523           }
524         }
525 #endif
526         baij_a = HD[idx];
527         if (roworiented) {
528           /*value = v + i*(stepval+bs)*bs + j*bs;*/
529           /* value = v + (i*(stepval+bs)+j)*bs; */
530           value = v_t;
531           v_t  += bs;
532           if (addv == ADD_VALUES) {
533             for (ii=0; ii<bs; ii++,value+=stepval) {
534               for (jj=ii; jj<bs2; jj+=bs) {
535                 baij_a[jj]  += *value++;
536               }
537             }
538           } else {
539             for (ii=0; ii<bs; ii++,value+=stepval) {
540               for (jj=ii; jj<bs2; jj+=bs) {
541                 baij_a[jj]  = *value++;
542               }
543             }
544           }
545         } else {
546           value = v + j*(stepval+bs)*bs + i*bs;
547           if (addv == ADD_VALUES) {
548             for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
549               for (jj=0; jj<bs; jj++) {
550                 baij_a[jj]  += *value++;
551               }
552             }
553           } else {
554             for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
555               for (jj=0; jj<bs; jj++) {
556                 baij_a[jj]  = *value++;
557               }
558             }
559           }
560         }
561       }
562     } else {
563       if (!baij->donotstash) {
564         if (roworiented) {
565           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
566         } else {
567           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
568         }
569       }
570     }
571   }
572 #if defined(PETSC_USE_DEBUG)
573   baij->ht_total_ct = total_ct;
574   baij->ht_insert_ct = insert_ct;
575 #endif
576   PetscFunctionReturn(0);
577 }
578 
579 #undef __FUNCT__
580 #define __FUNCT__ "MatGetValues_MPIBAIJ"
581 PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
582 {
583   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
584   PetscErrorCode ierr;
585   PetscInt       bs=mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
586   PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;
587 
588   PetscFunctionBegin;
589   for (i=0; i<m; i++) {
590     if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
591     if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
592     if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
593       row = idxm[i] - bsrstart;
594       for (j=0; j<n; j++) {
595         if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
596         if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
597         if (idxn[j] >= bscstart && idxn[j] < bscend){
598           col = idxn[j] - bscstart;
599           ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
600         } else {
601           if (!baij->colmap) {
602             ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
603           }
604 #if defined (PETSC_USE_CTABLE)
605           ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
606           data --;
607 #else
608           data = baij->colmap[idxn[j]/bs]-1;
609 #endif
610           if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
611           else {
612             col  = data + idxn[j]%bs;
613             ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
614           }
615         }
616       }
617     } else {
618       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
619     }
620   }
621   PetscFunctionReturn(0);
622 }
623 
624 #undef __FUNCT__
625 #define __FUNCT__ "MatNorm_MPIBAIJ"
626 PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
627 {
628   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
629   Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
630   PetscErrorCode ierr;
631   PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
632   PetscReal      sum = 0.0;
633   MatScalar      *v;
634 
635   PetscFunctionBegin;
636   if (baij->size == 1) {
637     ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
638   } else {
639     if (type == NORM_FROBENIUS) {
640       v = amat->a;
641       nz = amat->nz*bs2;
642       for (i=0; i<nz; i++) {
643 #if defined(PETSC_USE_COMPLEX)
644         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
645 #else
646         sum += (*v)*(*v); v++;
647 #endif
648       }
649       v = bmat->a;
650       nz = bmat->nz*bs2;
651       for (i=0; i<nz; i++) {
652 #if defined(PETSC_USE_COMPLEX)
653         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
654 #else
655         sum += (*v)*(*v); v++;
656 #endif
657       }
658       ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
659       *nrm = PetscSqrtReal(*nrm);
660     } else if (type == NORM_1) { /* max column sum */
661       PetscReal *tmp,*tmp2;
662       PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
663       ierr = PetscMalloc2(mat->cmap->N,PetscReal,&tmp,mat->cmap->N,PetscReal,&tmp2);CHKERRQ(ierr);
664       ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
665       v = amat->a; jj = amat->j;
666       for (i=0; i<amat->nz; i++) {
667         for (j=0; j<bs; j++){
668           col = bs*(cstart + *jj) + j; /* column index */
669           for (row=0; row<bs; row++){
670             tmp[col] += PetscAbsScalar(*v);  v++;
671           }
672         }
673         jj++;
674       }
675       v = bmat->a; jj = bmat->j;
676       for (i=0; i<bmat->nz; i++) {
677         for (j=0; j<bs; j++){
678           col = bs*garray[*jj] + j;
679           for (row=0; row<bs; row++){
680             tmp[col] += PetscAbsScalar(*v); v++;
681           }
682         }
683         jj++;
684       }
685       ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
686       *nrm = 0.0;
687       for (j=0; j<mat->cmap->N; j++) {
688         if (tmp2[j] > *nrm) *nrm = tmp2[j];
689       }
690       ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
691     } else if (type == NORM_INFINITY) { /* max row sum */
692       PetscReal *sums;
693       ierr = PetscMalloc(bs*sizeof(PetscReal),&sums);CHKERRQ(ierr);
694       sum = 0.0;
695       for (j=0; j<amat->mbs; j++) {
696         for (row=0; row<bs; row++) sums[row] = 0.0;
697         v = amat->a + bs2*amat->i[j];
698         nz = amat->i[j+1]-amat->i[j];
699         for (i=0; i<nz; i++) {
700           for (col=0; col<bs; col++){
701             for (row=0; row<bs; row++){
702               sums[row] += PetscAbsScalar(*v); v++;
703             }
704           }
705         }
706         v = bmat->a + bs2*bmat->i[j];
707         nz = bmat->i[j+1]-bmat->i[j];
708         for (i=0; i<nz; i++) {
709           for (col=0; col<bs; col++){
710             for (row=0; row<bs; row++){
711               sums[row] += PetscAbsScalar(*v); v++;
712             }
713           }
714         }
715         for (row=0; row<bs; row++){
716           if (sums[row] > sum) sum = sums[row];
717         }
718       }
719       ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
720       ierr = PetscFree(sums);CHKERRQ(ierr);
721     } else SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_SUP,"No support for this norm yet");
722   }
723   PetscFunctionReturn(0);
724 }
725 
726 /*
727   Creates and fills the hash table.
728   This table is created only once.
729   If new entries need to be added to the matrix
730   then the hash table has to be destroyed and
731   recreated.
732 */
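/*
   Sketch of the scheme used below and in MatSetValues(Blocked)_MPIBAIJ_HT(): each existing
   nonzero block (brow,bcol) is entered under the key brow*Nbs + bcol + 1, hashed with
   HASH(ht_size,key,tmp) and placed by linear probing; HD[idx] then points straight at the
   bs2 values of that block, so later insertions need no search through the block row.
*/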
733 #undef __FUNCT__
734 #define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
735 PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
736 {
737   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
738   Mat            A = baij->A,B=baij->B;
739   Mat_SeqBAIJ    *a=(Mat_SeqBAIJ *)A->data,*b=(Mat_SeqBAIJ *)B->data;
740   PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
741   PetscErrorCode ierr;
742   PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
743   PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
744   PetscInt       *HT,key;
745   MatScalar      **HD;
746   PetscReal      tmp;
747 #if defined(PETSC_USE_INFO)
748   PetscInt       ct=0,max=0;
749 #endif
750 
751   PetscFunctionBegin;
752   if (baij->ht) PetscFunctionReturn(0);
753 
754   baij->ht_size = (PetscInt)(factor*nz);
755   ht_size       = baij->ht_size;
756 
757   /* Allocate Memory for Hash Table */
758   ierr = PetscMalloc2(ht_size,MatScalar*,&baij->hd,ht_size,PetscInt,&baij->ht);CHKERRQ(ierr);
759   ierr = PetscMemzero(baij->hd,ht_size*sizeof(MatScalar*));CHKERRQ(ierr);
760   ierr = PetscMemzero(baij->ht,ht_size*sizeof(PetscInt));CHKERRQ(ierr);
761   HD   = baij->hd;
762   HT   = baij->ht;
763 
764   /* Loop Over A */
765   for (i=0; i<a->mbs; i++) {
766     for (j=ai[i]; j<ai[i+1]; j++) {
767       row = i+rstart;
768       col = aj[j]+cstart;
769 
770       key = row*Nbs + col + 1;
771       h1  = HASH(ht_size,key,tmp);
772       for (k=0; k<ht_size; k++){
773         if (!HT[(h1+k)%ht_size]) {
774           HT[(h1+k)%ht_size] = key;
775           HD[(h1+k)%ht_size] = a->a + j*bs2;
776           break;
777 #if defined(PETSC_USE_INFO)
778         } else {
779           ct++;
780 #endif
781         }
782       }
783 #if defined(PETSC_USE_INFO)
784       if (k> max) max = k;
785 #endif
786     }
787   }
788   /* Loop Over B */
789   for (i=0; i<b->mbs; i++) {
790     for (j=bi[i]; j<bi[i+1]; j++) {
791       row = i+rstart;
792       col = garray[bj[j]];
793       key = row*Nbs + col + 1;
794       h1  = HASH(ht_size,key,tmp);
795       for (k=0; k<ht_size; k++){
796         if (!HT[(h1+k)%ht_size]) {
797           HT[(h1+k)%ht_size] = key;
798           HD[(h1+k)%ht_size] = b->a + j*bs2;
799           break;
800 #if defined(PETSC_USE_INFO)
801         } else {
802           ct++;
803 #endif
804         }
805       }
806 #if defined(PETSC_USE_INFO)
807       if (k> max) max = k;
808 #endif
809     }
810   }
811 
812   /* Print Summary */
813 #if defined(PETSC_USE_INFO)
814   for (i=0,j=0; i<ht_size; i++) {
815     if (HT[i]) {j++;}
816   }
817   ierr = PetscInfo2(mat,"Average Search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
818 #endif
819   PetscFunctionReturn(0);
820 }
821 
822 #undef __FUNCT__
823 #define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
824 PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
825 {
826   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
827   PetscErrorCode ierr;
828   PetscInt       nstash,reallocs;
829   InsertMode     addv;
830 
831   PetscFunctionBegin;
832   if (baij->donotstash || mat->nooffprocentries) {
833     PetscFunctionReturn(0);
834   }
835 
836   /* make sure all processors are either in INSERT_VALUES or ADD_VALUES mode */
837   ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
838   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted values while others added");
839   mat->insertmode = addv; /* in case this processor had no cache */
840 
841   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
842   ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
843   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
844   ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
845   ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
846   ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
847   PetscFunctionReturn(0);
848 }
849 
850 #undef __FUNCT__
851 #define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
852 PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
853 {
854   Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
855   Mat_SeqBAIJ    *a=(Mat_SeqBAIJ*)baij->A->data;
856   PetscErrorCode ierr;
857   PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
858   PetscInt       *row,*col;
859   PetscBool      r1,r2,r3,other_disassembled;
860   MatScalar      *val;
861   InsertMode     addv = mat->insertmode;
862   PetscMPIInt    n;
863 
864   /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
865   PetscFunctionBegin;
866   if (!baij->donotstash && !mat->nooffprocentries) {
867     while (1) {
868       ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
869       if (!flg) break;
870 
871       for (i=0; i<n;) {
872         /* Now identify the consecutive vals belonging to the same row */
873         for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
874         if (j < n) ncols = j-i;
875         else       ncols = n-i;
876         /* Now assemble all these values with a single function call */
877         ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
878         i = j;
879       }
880     }
881     ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
882     /* Now process the block-stash. Since the values are stashed column-oriented,
883        set the roworiented flags to PETSC_FALSE, and after MatSetValuesBlocked()
884        restore the original flags */
885     r1 = baij->roworiented;
886     r2 = a->roworiented;
887     r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;
888     baij->roworiented = PETSC_FALSE;
889     a->roworiented    = PETSC_FALSE;
890     (((Mat_SeqBAIJ*)baij->B->data))->roworiented    = PETSC_FALSE; /* b->roworiented */
891     while (1) {
892       ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
893       if (!flg) break;
894 
895       for (i=0; i<n;) {
896         /* Now identify the consecutive vals belonging to the same row */
897         for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
898         if (j < n) ncols = j-i;
899         else       ncols = n-i;
900         ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr);
901         i = j;
902       }
903     }
904     ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);
905     baij->roworiented = r1;
906     a->roworiented    = r2;
907     ((Mat_SeqBAIJ*)baij->B->data)->roworiented    = r3; /* b->roworiented */
908   }
909 
910   ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
911   ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);
912 
913   /* determine if any processor has disassembled, if so we must
914      also disassemble ourselves, in order that we may reassemble. */
915   /*
916      if nonzero structure of submatrix B cannot change then we know that
917      no processor disassembled thus we can skip this stuff
918   */
919   if (!((Mat_SeqBAIJ*)baij->B->data)->nonew)  {
920     ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
921     if (mat->was_assembled && !other_disassembled) {
922       ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
923     }
924   }
925 
926   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
927     ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
928   }
929   ierr = MatSetOption(baij->B,MAT_CHECK_COMPRESSED_ROW,PETSC_FALSE);CHKERRQ(ierr);
930   ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
931   ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);
932 
933 #if defined(PETSC_USE_INFO)
934   if (baij->ht && mode== MAT_FINAL_ASSEMBLY) {
935     ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);
936     baij->ht_total_ct  = 0;
937     baij->ht_insert_ct = 0;
938   }
939 #endif
940   if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
941     ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);
942     mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
943     mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
944   }
945 
946   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
947   baij->rowvalues = 0;
948   PetscFunctionReturn(0);
949 }
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
953 static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
954 {
955   Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
956   PetscErrorCode    ierr;
957   PetscMPIInt       size = baij->size,rank = baij->rank;
958   PetscInt          bs = mat->rmap->bs;
959   PetscBool         iascii,isdraw;
960   PetscViewer       sviewer;
961   PetscViewerFormat format;
962 
963   PetscFunctionBegin;
964   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
965   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
966   if (iascii) {
967     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
968     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
969       MatInfo info;
970       ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
971       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
972       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
973       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
974              rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
975       ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
976       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
977       ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
978       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
979       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
980       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
981       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
982       ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
983       PetscFunctionReturn(0);
984     } else if (format == PETSC_VIEWER_ASCII_INFO) {
985       ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
986       PetscFunctionReturn(0);
987     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
988       PetscFunctionReturn(0);
989     }
990   }
991 
992   if (isdraw) {
993     PetscDraw       draw;
994     PetscBool  isnull;
995     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
996     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
997   }
998 
999   if (size == 1) {
1000     ierr = PetscObjectSetName((PetscObject)baij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
1001     ierr = MatView(baij->A,viewer);CHKERRQ(ierr);
1002   } else {
1003     /* assemble the entire matrix onto first processor. */
1004     Mat         A;
1005     Mat_SeqBAIJ *Aloc;
1006     PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
1007     MatScalar   *a;
1008 
1009     /* Here we are creating a temporary matrix, so we will assume MPIBAIJ is acceptable */
1010     /* Perhaps this should be the type of mat? */
1011     ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr);
1012     if (!rank) {
1013       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
1014     } else {
1015       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
1016     }
1017     ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
1018     ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
1019     ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
1020     ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr);
1021 
1022     /* copy over the A part */
1023     Aloc = (Mat_SeqBAIJ*)baij->A->data;
1024     ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1025     ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);
1026 
1027     for (i=0; i<mbs; i++) {
1028       rvals[0] = bs*(baij->rstartbs + i);
1029       for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
1030       for (j=ai[i]; j<ai[i+1]; j++) {
1031         col = (baij->cstartbs+aj[j])*bs;
1032         for (k=0; k<bs; k++) {
1033           ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
1034           col++; a += bs;
1035         }
1036       }
1037     }
1038     /* copy over the B part */
1039     Aloc = (Mat_SeqBAIJ*)baij->B->data;
1040     ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1041     for (i=0; i<mbs; i++) {
1042       rvals[0] = bs*(baij->rstartbs + i);
1043       for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
1044       for (j=ai[i]; j<ai[i+1]; j++) {
1045         col = baij->garray[aj[j]]*bs;
1046         for (k=0; k<bs; k++) {
1047           ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
1048           col++; a += bs;
1049         }
1050       }
1051     }
1052     ierr = PetscFree(rvals);CHKERRQ(ierr);
1053     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1054     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1055     /*
1056        Everyone has to participate in drawing the matrix since the graphics waits are
1057        synchronized across all processors that share the PetscDraw object
1058     */
1059     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
1060     if (!rank) {
1061       ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
1062       /* Set the type name to MATMPIBAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqBAIJ_ASCII() */
1063       ierr = PetscStrcpy(((PetscObject)((Mat_MPIBAIJ*)(A->data))->A)->type_name,MATMPIBAIJ);CHKERRQ(ierr);
1064       ierr = MatView(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
1065     }
1066     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
1067     ierr = MatDestroy(&A);CHKERRQ(ierr);
1068   }
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 #undef __FUNCT__
1073 #define __FUNCT__ "MatView_MPIBAIJ_Binary"
1074 static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
1075 {
1076   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
1077   Mat_SeqBAIJ*   A = (Mat_SeqBAIJ*)a->A->data;
1078   Mat_SeqBAIJ*   B = (Mat_SeqBAIJ*)a->B->data;
1079   PetscErrorCode ierr;
1080   PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
1081   PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
1082   int            fd;
1083   PetscScalar    *column_values;
1084   FILE           *file;
1085   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
1086   PetscInt       message_count,flowcontrolcount;
1087 
1088   PetscFunctionBegin;
1089   ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
1090   ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);
1091   nz   = bs2*(A->nz + B->nz);
1092   rlen = mat->rmap->n;
1093   if (!rank) {
1094     header[0] = MAT_FILE_CLASSID;
1095     header[1] = mat->rmap->N;
1096     header[2] = mat->cmap->N;
1097     ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
1098     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
1099     ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1100     /* get largest number of rows any processor has */
1101     range = mat->rmap->range;
1102     for (i=1; i<size; i++) {
1103       rlen = PetscMax(rlen,range[i+1] - range[i]);
1104     }
1105   } else {
1106     ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
1107   }
1108 
1109   ierr  = PetscMalloc((rlen/bs)*sizeof(PetscInt),&crow_lens);CHKERRQ(ierr);
1110   /* compute lengths of each row  */
1111   for (i=0; i<a->mbs; i++) {
1112     crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
1113   }
1114   /* store the row lengths to the file */
1115   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1116   if (!rank) {
1117     MPI_Status status;
1118     ierr  = PetscMalloc(rlen*sizeof(PetscInt),&row_lens);CHKERRQ(ierr);
1119     rlen  = (range[1] - range[0])/bs;
1120     for (i=0; i<rlen; i++) {
1121       for (j=0; j<bs; j++) {
1122         row_lens[i*bs+j] = bs*crow_lens[i];
1123       }
1124     }
1125     ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1126     for (i=1; i<size; i++) {
1127       rlen = (range[i+1] - range[i])/bs;
1128       ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr);
1129       ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
1130       for (k=0; k<rlen; k++) {
1131 	for (j=0; j<bs; j++) {
1132 	  row_lens[k*bs+j] = bs*crow_lens[k];
1133 	}
1134       }
1135       ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1136     }
1137     ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr);
1138     ierr = PetscFree(row_lens);CHKERRQ(ierr);
1139   } else {
1140     ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr);
1141     ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
1142     ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr);
1143   }
1144   ierr = PetscFree(crow_lens);CHKERRQ(ierr);
1145 
1146   /* load up the local column indices. Include them for every point row, not just once per block row, since process 0 does not have the
1147      information needed to expand a block row into its point rows. This does require more communication, but still no more than
1148      the communication needed for the nonzero values  */
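  /*
     The resulting binary layout is the standard PETSc binary matrix format: the header
     {MAT_FILE_CLASSID, M, N, total nz} written above, then the per-point-row lengths,
     then all column indices, then all numerical values; the block size is recorded
     separately as "-matload_block_size <bs>" in the companion .info file at the end of
     this routine.
  */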
1149   nzmax = nz; /* space the largest processor needs */
1150   ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
1151   ierr = PetscMalloc(nzmax*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
1152   cnt  = 0;
1153   for (i=0; i<a->mbs; i++) {
1154     pcnt = cnt;
1155     for (j=B->i[i]; j<B->i[i+1]; j++) {
1156       if ( (col = garray[B->j[j]]) > cstart) break;
1157       for (l=0; l<bs; l++) {
1158 	column_indices[cnt++] = bs*col+l;
1159       }
1160     }
1161     for (k=A->i[i]; k<A->i[i+1]; k++) {
1162       for (l=0; l<bs; l++) {
1163         column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
1164       }
1165     }
1166     for (; j<B->i[i+1]; j++) {
1167       for (l=0; l<bs; l++) {
1168         column_indices[cnt++] = bs*garray[B->j[j]]+l;
1169       }
1170     }
1171     len = cnt - pcnt;
1172     for (k=1; k<bs; k++) {
1173       ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
1174       cnt += len;
1175     }
1176   }
1177   if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);
1178 
1179   /* store the columns to the file */
1180   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1181   if (!rank) {
1182     MPI_Status status;
1183     ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1184     for (i=1; i<size; i++) {
1185       ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr);
1186       ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
1187       ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
1188       ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1189     }
1190     ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr);
1191   } else {
1192     ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr);
1193     ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
1194     ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
1195     ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr);
1196   }
1197   ierr = PetscFree(column_indices);CHKERRQ(ierr);
1198 
1199   /* load up the numerical values */
1200   ierr = PetscMalloc(nzmax*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
1201   cnt = 0;
1202   for (i=0; i<a->mbs; i++) {
1203     rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
1204     for (j=B->i[i]; j<B->i[i+1]; j++) {
1205       if ( garray[B->j[j]] > cstart) break;
1206       for (l=0; l<bs; l++) {
1207         for (ll=0; ll<bs; ll++) {
1208 	  column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
1209         }
1210       }
1211       cnt += bs;
1212     }
1213     for (k=A->i[i]; k<A->i[i+1]; k++) {
1214       for (l=0; l<bs; l++) {
1215         for (ll=0; ll<bs; ll++) {
1216           column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
1217         }
1218       }
1219       cnt += bs;
1220     }
1221     for (; j<B->i[i+1]; j++) {
1222       for (l=0; l<bs; l++) {
1223         for (ll=0; ll<bs; ll++) {
1224 	  column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
1225         }
1226       }
1227       cnt += bs;
1228     }
1229     cnt += (bs-1)*rlen;
1230   }
1231   if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);
1232 
1233   /* store the column values to the file */
1234   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1235   if (!rank) {
1236     MPI_Status status;
1237     ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
1238     for (i=1; i<size; i++) {
1239       ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr);
1240       ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
1241       ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
1242       ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
1243     }
1244     ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr);
1245   } else {
1246     ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr);
1247     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
1248     ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
1249     ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr);
1250   }
1251   ierr = PetscFree(column_values);CHKERRQ(ierr);
1252 
1253   ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
1254   if (file) {
1255     fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
1256   }
1257   PetscFunctionReturn(0);
1258 }
1259 
1260 #undef __FUNCT__
1261 #define __FUNCT__ "MatView_MPIBAIJ"
1262 PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
1263 {
1264   PetscErrorCode ierr;
1265   PetscBool      iascii,isdraw,issocket,isbinary;
1266 
1267   PetscFunctionBegin;
1268   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1269   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
1270   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
1271   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
1272   if (iascii || isdraw || issocket) {
1273     ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
1274   } else if (isbinary) {
1275     ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
1276   } else {
1277     SETERRQ1(((PetscObject)mat)->comm,PETSC_ERR_SUP,"Viewer type %s not supported by MPIBAIJ matrices",((PetscObject)viewer)->type_name);
1278   }
1279   PetscFunctionReturn(0);
1280 }
1281 
1282 #undef __FUNCT__
1283 #define __FUNCT__ "MatDestroy_MPIBAIJ"
1284 PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
1285 {
1286   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1287   PetscErrorCode ierr;
1288 
1289   PetscFunctionBegin;
1290 #if defined(PETSC_USE_LOG)
1291   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
1292 #endif
1293   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
1294   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
1295   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
1296   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
1297 #if defined (PETSC_USE_CTABLE)
1298   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
1299 #else
1300   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
1301 #endif
1302   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
1303   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
1304   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
1305   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
1306   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
1307   ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
1308   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
1309   ierr = PetscFree(mat->data);CHKERRQ(ierr);
1310 
1311   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
1312   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
1313   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
1314   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
1315   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
1316   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
1317   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
1318   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C","",PETSC_NULL);CHKERRQ(ierr);
1319   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C","",PETSC_NULL);CHKERRQ(ierr);
1320   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C","",PETSC_NULL);CHKERRQ(ierr);
1321   PetscFunctionReturn(0);
1322 }
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "MatMult_MPIBAIJ"
1326 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1327 {
1328   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1329   PetscErrorCode ierr;
1330   PetscInt       nt;
1331 
1332   PetscFunctionBegin;
1333   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1334   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1335   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1336   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1337   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1338   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1339   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1340   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1341   PetscFunctionReturn(0);
1342 }
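
/*
   MatMult_MPIBAIJ() above computes yy = A_d*xx + A_o*x_ghost: the scatter of the
   needed ghost values of xx into a->lvec is overlapped with the diagonal-block
   product.  A minimal usage sketch (hypothetical caller, not part of this file;
   assumes A is an assembled MATMPIBAIJ matrix):

       Vec x,y;
       ierr = MatGetVecs(A,&x,&y);CHKERRQ(ierr);
       ierr = VecSet(x,1.0);CHKERRQ(ierr);
       ierr = MatMult(A,x,y);CHKERRQ(ierr);      MatMult() dispatches here for MATMPIBAIJ
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       ierr = VecDestroy(&y);CHKERRQ(ierr);
*/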
1343 
1344 #undef __FUNCT__
1345 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1346 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1347 {
1348   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1349   PetscErrorCode ierr;
1350 
1351   PetscFunctionBegin;
1352   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1353   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1354   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1355   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1356   PetscFunctionReturn(0);
1357 }
1358 
1359 #undef __FUNCT__
1360 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1361 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1362 {
1363   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1364   PetscErrorCode ierr;
1365   PetscBool      merged;
1366 
1367   PetscFunctionBegin;
1368   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1369   /* do nondiagonal part */
1370   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1371   if (!merged) {
1372     /* send it on its way */
1373     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1374     /* do local part */
1375     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1376     /* receive remote parts: note this assumes the values are not actually */
1377     /* inserted in yy until the next line */
1378     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1379   } else {
1380     /* do local part */
1381     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1382     /* send it on its way */
1383     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1384     /* values were actually received in the Begin(), but we still need to call this (a no-op) */
1385     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1386   }
1387   PetscFunctionReturn(0);
1388 }
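
/*
   Note: in either branch above the result is yy = A_d^T*xx plus the reverse
   scatter-add of A_o^T*xx (computed into a->lvec); only the placement of the
   local transpose product relative to VecScatterBegin/End() changes, depending
   on whether the scatter context has merged begin/end phases.
*/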
1389 
1390 #undef __FUNCT__
1391 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1392 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1393 {
1394   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1395   PetscErrorCode ierr;
1396 
1397   PetscFunctionBegin;
1398   /* do nondiagonal part */
1399   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1400   /* send it on its way */
1401   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1402   /* do local part */
1403   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1404   /* receive remote parts: note this assumes the values are not actually */
1405   /* inserted in zz until the VecScatterEnd() below, which is true for this */
1406   /* implementation but may not hold for every scatter implementation. */
1407   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1408   PetscFunctionReturn(0);
1409 }
1410 
1411 /*
1412   This only works correctly for square matrices where the subblock A->A is the
1413    diagonal block
1414 */
1415 #undef __FUNCT__
1416 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1417 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1418 {
1419   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1420   PetscErrorCode ierr;
1421 
1422   PetscFunctionBegin;
1423   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1424   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1425   PetscFunctionReturn(0);
1426 }
1427 
1428 #undef __FUNCT__
1429 #define __FUNCT__ "MatScale_MPIBAIJ"
1430 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1431 {
1432   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1433   PetscErrorCode ierr;
1434 
1435   PetscFunctionBegin;
1436   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1437   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1438   PetscFunctionReturn(0);
1439 }
1440 
1441 #undef __FUNCT__
1442 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1443 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1444 {
1445   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1446   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1447   PetscErrorCode ierr;
1448   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1449   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1450   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1451 
1452   PetscFunctionBegin;
1453   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1454   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1455   mat->getrowactive = PETSC_TRUE;
1456 
1457   if (!mat->rowvalues && (idx || v)) {
1458     /*
1459         allocate enough space to hold information from the longest row.
1460     */
1461     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1462     PetscInt     max = 1,mbs = mat->mbs,tmp;
1463     for (i=0; i<mbs; i++) {
1464       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1465       if (max < tmp) { max = tmp; }
1466     }
1467     ierr = PetscMalloc2(max*bs2,PetscScalar,&mat->rowvalues,max*bs2,PetscInt,&mat->rowindices);CHKERRQ(ierr);
1468   }
1469   lrow = row - brstart;
1470 
1471   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1472   if (!v)   {pvA = 0; pvB = 0;}
1473   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1474   ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1475   ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1476   nztot = nzA + nzB;
1477 
1478   cmap  = mat->garray;
1479   if (v  || idx) {
1480     if (nztot) {
1481       /* Sort by increasing column numbers, assuming A and B already sorted */
1482       PetscInt imark = -1;
1483       if (v) {
1484         *v = v_p = mat->rowvalues;
1485         for (i=0; i<nzB; i++) {
1486           if (cmap[cworkB[i]/bs] < cstart)   v_p[i] = vworkB[i];
1487           else break;
1488         }
1489         imark = i;
1490         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1491         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1492       }
1493       if (idx) {
1494         *idx = idx_p = mat->rowindices;
1495         if (imark > -1) {
1496           for (i=0; i<imark; i++) {
1497             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1498           }
1499         } else {
1500           for (i=0; i<nzB; i++) {
1501             if (cmap[cworkB[i]/bs] < cstart)
1502               idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
1503             else break;
1504           }
1505           imark = i;
1506         }
1507         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1508         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
1509       }
1510     } else {
1511       if (idx) *idx = 0;
1512       if (v)   *v   = 0;
1513     }
1514   }
1515   *nz = nztot;
1516   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1517   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1518   PetscFunctionReturn(0);
1519 }
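
/*
   The merge above returns the row in globally increasing column order: the
   off-diagonal (B) entries whose global column lies below the diagonal block
   come first, then the diagonal (A) entries shifted by cstart*bs, then the
   remaining B entries.  A small worked example (bs = 1, hypothetical data):
   with cstart*bs = 2, diagonal columns {2,3} and off-diagonal columns {0,7},
   the returned idx[] is {0,2,3,7}.
*/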
1520 
1521 #undef __FUNCT__
1522 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1523 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1524 {
1525   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1526 
1527   PetscFunctionBegin;
1528   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1529   baij->getrowactive = PETSC_FALSE;
1530   PetscFunctionReturn(0);
1531 }
1532 
1533 #undef __FUNCT__
1534 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1535 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1536 {
1537   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1538   PetscErrorCode ierr;
1539 
1540   PetscFunctionBegin;
1541   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1542   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1543   PetscFunctionReturn(0);
1544 }
1545 
1546 #undef __FUNCT__
1547 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1548 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1549 {
1550   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1551   Mat            A = a->A,B = a->B;
1552   PetscErrorCode ierr;
1553   PetscReal      isend[5],irecv[5];
1554 
1555   PetscFunctionBegin;
1556   info->block_size     = (PetscReal)matin->rmap->bs;
1557   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1558   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1559   isend[3] = info->memory;  isend[4] = info->mallocs;
1560   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1561   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1562   isend[3] += info->memory;  isend[4] += info->mallocs;
1563   if (flag == MAT_LOCAL) {
1564     info->nz_used      = isend[0];
1565     info->nz_allocated = isend[1];
1566     info->nz_unneeded  = isend[2];
1567     info->memory       = isend[3];
1568     info->mallocs      = isend[4];
1569   } else if (flag == MAT_GLOBAL_MAX) {
1570     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr);
1571     info->nz_used      = irecv[0];
1572     info->nz_allocated = irecv[1];
1573     info->nz_unneeded  = irecv[2];
1574     info->memory       = irecv[3];
1575     info->mallocs      = irecv[4];
1576   } else if (flag == MAT_GLOBAL_SUM) {
1577     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr);
1578     info->nz_used      = irecv[0];
1579     info->nz_allocated = irecv[1];
1580     info->nz_unneeded  = irecv[2];
1581     info->memory       = irecv[3];
1582     info->mallocs      = irecv[4];
1583   } else {
1584     SETERRQ1(((PetscObject)matin)->comm,PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1585   }
1586   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1587   info->fill_ratio_needed = 0;
1588   info->factor_mallocs    = 0;
1589   PetscFunctionReturn(0);
1590 }
1591 
1592 #undef __FUNCT__
1593 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1594 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool  flg)
1595 {
1596   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1597   PetscErrorCode ierr;
1598 
1599   PetscFunctionBegin;
1600   switch (op) {
1601   case MAT_NEW_NONZERO_LOCATIONS:
1602   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1603   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1604   case MAT_KEEP_NONZERO_PATTERN:
1605   case MAT_NEW_NONZERO_LOCATION_ERR:
1606     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1607     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1608     break;
1609   case MAT_ROW_ORIENTED:
1610     a->roworiented = flg;
1611     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1612     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1613     break;
1614   case MAT_NEW_DIAGONALS:
1615     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1616     break;
1617   case MAT_IGNORE_OFF_PROC_ENTRIES:
1618     a->donotstash = flg;
1619     break;
1620   case MAT_USE_HASH_TABLE:
1621     a->ht_flag = flg;
1622     break;
1623   case MAT_SYMMETRIC:
1624   case MAT_STRUCTURALLY_SYMMETRIC:
1625   case MAT_HERMITIAN:
1626   case MAT_SYMMETRY_ETERNAL:
1627     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1628     break;
1629   default:
1630     SETERRQ1(((PetscObject)A)->comm,PETSC_ERR_SUP,"unknown option %d",op);
1631   }
1632   PetscFunctionReturn(0);
1633 }
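
/*
   A hypothetical usage sketch (not part of this file): the per-matrix flags
   handled above are set through the public interface, typically before
   preallocation/assembly, e.g.

       ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
       ierr = MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/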
1634 
1635 #undef __FUNCT__
1636 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1637 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1638 {
1639   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1640   Mat_SeqBAIJ    *Aloc;
1641   Mat            B;
1642   PetscErrorCode ierr;
1643   PetscInt       M=A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1644   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1645   MatScalar      *a;
1646 
1647   PetscFunctionBegin;
1648   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1649   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1650     ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1651     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1652     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1653     /* Do not know preallocation information, but must set block size */
1654     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,PETSC_NULL,PETSC_DECIDE,PETSC_NULL);CHKERRQ(ierr);
1655   } else {
1656     B = *matout;
1657   }
1658 
1659   /* copy over the A part */
1660   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1661   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1662   ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);
1663 
1664   for (i=0; i<mbs; i++) {
1665     rvals[0] = bs*(baij->rstartbs + i);
1666     for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
1667     for (j=ai[i]; j<ai[i+1]; j++) {
1668       col = (baij->cstartbs+aj[j])*bs;
1669       for (k=0; k<bs; k++) {
1670         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1671         col++; a += bs;
1672       }
1673     }
1674   }
1675   /* copy over the B part */
1676   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1677   ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1678   for (i=0; i<mbs; i++) {
1679     rvals[0] = bs*(baij->rstartbs + i);
1680     for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
1681     for (j=ai[i]; j<ai[i+1]; j++) {
1682       col = baij->garray[aj[j]]*bs;
1683       for (k=0; k<bs; k++) {
1684         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1685         col++; a += bs;
1686       }
1687     }
1688   }
1689   ierr = PetscFree(rvals);CHKERRQ(ierr);
1690   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1691   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1692 
1693   if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
1694     *matout = B;
1695   } else {
1696     ierr = MatHeaderMerge(A,B);CHKERRQ(ierr);
1697   }
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 #undef __FUNCT__
1702 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1703 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1704 {
1705   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1706   Mat            a = baij->A,b = baij->B;
1707   PetscErrorCode ierr;
1708   PetscInt       s1,s2,s3;
1709 
1710   PetscFunctionBegin;
1711   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1712   if (rr) {
1713     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1714     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1715     /* Overlap communication with computation. */
1716     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1717   }
1718   if (ll) {
1719     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1720     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1721     ierr = (*b->ops->diagonalscale)(b,ll,PETSC_NULL);CHKERRQ(ierr);
1722   }
1723   /* scale  the diagonal block */
1724   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1725 
1726   if (rr) {
1727     /* Do a scatter end and then right scale the off-diagonal block */
1728     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1729     ierr = (*b->ops->diagonalscale)(b,PETSC_NULL,baij->lvec);CHKERRQ(ierr);
1730   }
1731 
1732   PetscFunctionReturn(0);
1733 }
1734 
1735 #undef __FUNCT__
1736 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1737 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1738 {
1739   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1740   PetscErrorCode    ierr;
1741   PetscMPIInt       imdex,size = l->size,n,rank = l->rank;
1742   PetscInt          i,*owners = A->rmap->range;
1743   PetscInt          *nprocs,j,idx,nsends,row;
1744   PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
1745   PetscInt          *rvalues,tag = ((PetscObject)A)->tag,count,base,slen,*source,lastidx = -1;
1746   PetscInt          *lens,*lrows,*values,rstart_bs=A->rmap->rstart;
1747   MPI_Comm          comm = ((PetscObject)A)->comm;
1748   MPI_Request       *send_waits,*recv_waits;
1749   MPI_Status        recv_status,*send_status;
1750   const PetscScalar *xx;
1751   PetscScalar       *bb;
1752 #if defined(PETSC_DEBUG)
1753   PetscBool         found = PETSC_FALSE;
1754 #endif
1755 
1756   PetscFunctionBegin;
1757   /*  first count number of contributors to each processor */
1758   ierr  = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
1759   ierr  = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
1760   ierr  = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
1761   j = 0;
1762   for (i=0; i<N; i++) {
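    /* j keeps its value from the previous iteration: when rows[] is nondecreasing the
       ownership search resumes where it left off, and is restarted at process 0 otherwise */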
1763     if (lastidx > (idx = rows[i])) j = 0;
1764     lastidx = idx;
1765     for (; j<size; j++) {
1766       if (idx >= owners[j] && idx < owners[j+1]) {
1767         nprocs[2*j]++;
1768         nprocs[2*j+1] = 1;
1769         owner[i] = j;
1770 #if defined(PETSC_DEBUG)
1771         found = PETSC_TRUE;
1772 #endif
1773         break;
1774       }
1775     }
1776 #if defined(PETSC_DEBUG)
1777     if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
1778     found = PETSC_FALSE;
1779 #endif
1780   }
1781   nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}
1782 
1783   if (A->nooffproczerorows) {
1784     if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but attempted to zero a row owned by another process");
1785     nrecvs = nsends;
1786     nmax   = N;
1787   } else {
1788     /* inform other processors of number of messages and max length*/
1789     ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
1790   }
1791 
1792   /* post receives:   */
1793   ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
1794   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
1795   for (i=0; i<nrecvs; i++) {
1796     ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
1797   }
1798 
1799   /* do sends:
1800      1) starts[i] gives the starting index in svalues for stuff going to
1801      the ith processor
1802   */
1803   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
1804   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
1805   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
1806   starts[0]  = 0;
1807   for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
1808   for (i=0; i<N; i++) {
1809     svalues[starts[owner[i]]++] = rows[i];
1810   }
1811 
1812   starts[0] = 0;
1813   for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
1814   count = 0;
1815   for (i=0; i<size; i++) {
1816     if (nprocs[2*i+1]) {
1817       ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
1818     }
1819   }
1820   ierr = PetscFree(starts);CHKERRQ(ierr);
1821 
1822   base = owners[rank];
1823 
1824   /*  wait on receives */
1825   ierr   = PetscMalloc2(nrecvs+1,PetscInt,&lens,nrecvs+1,PetscInt,&source);CHKERRQ(ierr);
1826   count  = nrecvs;
1827   slen = 0;
1828   while (count) {
1829     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
1830     /* unpack receives into our local space */
1831     ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
1832     source[imdex]  = recv_status.MPI_SOURCE;
1833     lens[imdex]    = n;
1834     slen          += n;
1835     count--;
1836   }
1837   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1838 
1839   /* move the data into the send scatter */
1840   ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
1841   count = 0;
1842   for (i=0; i<nrecvs; i++) {
1843     values = rvalues + i*nmax;
1844     for (j=0; j<lens[i]; j++) {
1845       lrows[count++] = values[j] - base;
1846     }
1847   }
1848   ierr = PetscFree(rvalues);CHKERRQ(ierr);
1849   ierr = PetscFree2(lens,source);CHKERRQ(ierr);
1850   ierr = PetscFree(owner);CHKERRQ(ierr);
1851   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1852 
1853   /* fix right hand side if needed */
1854   if (x && b) {
1855     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1856     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1857     for (i=0; i<slen; i++) {
1858       bb[lrows[i]] = diag*xx[lrows[i]];
1859     }
1860     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1861     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1862   }
1863 
1864   /* actually zap the local rows */
1865   /*
1866         Zero the required rows. If the "diagonal block" of the matrix
1867      is square and the user wishes to set the diagonal, we use separate
1868      code so that MatSetValues() is not called for each diagonal entry;
1869      that would allocate new memory for every entry, causing many mallocs and slowing things down.
1870 
1871   */
1872   /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
1873   ierr = MatZeroRows_SeqBAIJ(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1874   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
1875     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr);
1876   } else if (diag != 0.0) {
1877     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1878     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1879        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1880     for (i=0; i<slen; i++) {
1881       row  = lrows[i] + rstart_bs;
1882       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1883     }
1884     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1885     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1886   } else {
1887     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1888   }
1889 
1890   ierr = PetscFree(lrows);CHKERRQ(ierr);
1891 
1892   /* wait on sends */
1893   if (nsends) {
1894     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
1895     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
1896     ierr = PetscFree(send_status);CHKERRQ(ierr);
1897   }
1898   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1899   ierr = PetscFree(svalues);CHKERRQ(ierr);
1900 
1901   PetscFunctionReturn(0);
1902 }
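
/*
   A minimal usage sketch (hypothetical caller, not part of this file): zero two
   global rows, put 1.0 on their diagonal entries, and adjust the right-hand side
   b so the solution keeps the values currently stored in x:

       PetscInt zrows[2] = {0,5};
       ierr = MatZeroRows(A,2,zrows,1.0,x,b);CHKERRQ(ierr);

   Passing PETSC_NULL for x and b skips the right-hand-side fix performed above.
*/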
1903 
1904 #undef __FUNCT__
1905 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
1906 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1907 {
1908   Mat_MPIBAIJ    *a   = (Mat_MPIBAIJ*)A->data;
1909   PetscErrorCode ierr;
1910 
1911   PetscFunctionBegin;
1912   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1913   PetscFunctionReturn(0);
1914 }
1915 
1916 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat *);
1917 
1918 #undef __FUNCT__
1919 #define __FUNCT__ "MatEqual_MPIBAIJ"
1920 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1921 {
1922   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1923   Mat            a,b,c,d;
1924   PetscBool      flg;
1925   PetscErrorCode ierr;
1926 
1927   PetscFunctionBegin;
1928   a = matA->A; b = matA->B;
1929   c = matB->A; d = matB->B;
1930 
1931   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1932   if (flg) {
1933     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1934   }
1935   ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
1936   PetscFunctionReturn(0);
1937 }
1938 
1939 #undef __FUNCT__
1940 #define __FUNCT__ "MatCopy_MPIBAIJ"
1941 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
1942 {
1943   PetscErrorCode ierr;
1944   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ *)A->data;
1945   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ *)B->data;
1946 
1947   PetscFunctionBegin;
1948   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1949   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1950     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1951   } else {
1952     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1953     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1954   }
1955   PetscFunctionReturn(0);
1956 }
1957 
1958 #undef __FUNCT__
1959 #define __FUNCT__ "MatSetUp_MPIBAIJ"
1960 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
1961 {
1962   PetscErrorCode ierr;
1963 
1964   PetscFunctionBegin;
1965   ierr =  MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1966   PetscFunctionReturn(0);
1967 }
1968 
1969 #undef __FUNCT__
1970 #define __FUNCT__ "MatAXPY_MPIBAIJ"
1971 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1972 {
1973   PetscErrorCode ierr;
1974   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ *)X->data,*yy=(Mat_MPIBAIJ *)Y->data;
1975   PetscBLASInt   bnz,one=1;
1976   Mat_SeqBAIJ    *x,*y;
1977 
1978   PetscFunctionBegin;
1979   if (str == SAME_NONZERO_PATTERN) {
1980     PetscScalar alpha = a;
1981     x = (Mat_SeqBAIJ *)xx->A->data;
1982     y = (Mat_SeqBAIJ *)yy->A->data;
1983     bnz = PetscBLASIntCast(x->nz*Y->rmap->bs*Y->rmap->bs); /* x->nz counts nonzero blocks, each holding bs*bs scalar entries */
1984     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
1985     x = (Mat_SeqBAIJ *)xx->B->data;
1986     y = (Mat_SeqBAIJ *)yy->B->data;
1987     bnz = PetscBLASIntCast(x->nz*Y->rmap->bs*Y->rmap->bs);
1988     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
1989   } else {
1990     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1991   }
1992   PetscFunctionReturn(0);
1993 }
1994 
1995 #undef __FUNCT__
1996 #define __FUNCT__ "MatRealPart_MPIBAIJ"
1997 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
1998 {
1999   Mat_MPIBAIJ   *a = (Mat_MPIBAIJ*)A->data;
2000   PetscErrorCode ierr;
2001 
2002   PetscFunctionBegin;
2003   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2004   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2005   PetscFunctionReturn(0);
2006 }
2007 
2008 #undef __FUNCT__
2009 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2010 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2011 {
2012   Mat_MPIBAIJ   *a = (Mat_MPIBAIJ*)A->data;
2013   PetscErrorCode ierr;
2014 
2015   PetscFunctionBegin;
2016   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2017   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2018   PetscFunctionReturn(0);
2019 }
2020 
2021 #undef __FUNCT__
2022 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2023 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2024 {
2025   PetscErrorCode ierr;
2026   IS             iscol_local;
2027   PetscInt       csize;
2028 
2029   PetscFunctionBegin;
2030   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2031   if (call == MAT_REUSE_MATRIX) {
2032     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2033     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2034   } else {
2035     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2036   }
2037   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2038   if (call == MAT_INITIAL_MATRIX) {
2039     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2040     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2041   }
2042   PetscFunctionReturn(0);
2043 }
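
/*
   A hypothetical reuse sketch (not part of this file): the gathered column IS
   (and, in the private routine below, the per-process sequential "SubMatrix")
   is composed on the result of the first call so that a later call with
   MAT_REUSE_MATRIX can refill the same layout without re-gathering:

       ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&S);CHKERRQ(ierr);
       ... change numerical values of A, same nonzero pattern ...
       ierr = MatGetSubMatrix(A,isrow,iscol,MAT_REUSE_MATRIX,&S);CHKERRQ(ierr);
*/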
2044 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2045 #undef __FUNCT__
2046 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2047 /*
2048   Not great since it makes two copies of the submatrix: first a local SeqBAIJ,
2049   and then the final result obtained by concatenating the local matrices.
2050   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2051 */
2052 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2053 {
2054   PetscErrorCode ierr;
2055   PetscMPIInt    rank,size;
2056   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2057   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2058   Mat            M,Mreuse;
2059   MatScalar      *vwork,*aa;
2060   MPI_Comm       comm = ((PetscObject)mat)->comm;
2061   IS             isrow_new, iscol_new;
2062   PetscBool      idflag,allrows, allcols;
2063   Mat_SeqBAIJ    *aij;
2064 
2065 
2066   PetscFunctionBegin;
2067   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2068   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2069   /* The compression and expansion should be avoided. It does not catch
2070      errors and might change the indices, hence buggy */
2071   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2072   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2073 
2074   /* Check for special case: each processor gets entire matrix columns */
2075   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2076   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2077   if (idflag && ncol == mat->cmap->N){
2078     allcols = PETSC_TRUE;
2079   } else {
2080     allcols = PETSC_FALSE;
2081   }
2082 
2083   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2084   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2085   if (idflag && nrow == mat->rmap->N){
2086     allrows = PETSC_TRUE;
2087   } else {
2088     allrows = PETSC_FALSE;
2089   }
2090   if (call ==  MAT_REUSE_MATRIX) {
2091     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
2092     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2093     ierr  = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2094   } else {
2095     ierr   = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2096   }
2097   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2098   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2099   /*
2100       m - number of local rows
2101       n - number of columns (same on all processors)
2102       rstart - first row in new global matrix generated
2103   */
2104   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2105   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2106   m    = m/bs;
2107   n    = n/bs;
2108 
2109   if (call == MAT_INITIAL_MATRIX) {
2110     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2111     ii  = aij->i;
2112     jj  = aij->j;
2113 
2114     /*
2115         Determine the number of non-zeros in the diagonal and off-diagonal
2116         portions of the matrix in order to do correct preallocation
2117     */
2118 
2119     /* first get start and end of "diagonal" columns */
2120     if (csize == PETSC_DECIDE) {
2121       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2122       if (mglobal == n*bs) { /* square matrix */
2123         nlocal = m;
2124       } else {
2125         nlocal = n/size + ((n % size) > rank);
2126       }
2127     } else {
2128       nlocal = csize/bs;
2129     }
2130     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2131     rstart = rend - nlocal;
2132     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2133 
2134     /* next, compute all the lengths */
2135     ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
2136     olens = dlens + m;
2137     for (i=0; i<m; i++) {
2138       jend = ii[i+1] - ii[i];
2139       olen = 0;
2140       dlen = 0;
2141       for (j=0; j<jend; j++) {
2142         if (*jj < rstart || *jj >= rend) olen++;
2143         else dlen++;
2144         jj++;
2145       }
2146       olens[i] = olen;
2147       dlens[i] = dlen;
2148     }
2149     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2150     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2151     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2152     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2153     ierr = PetscFree(dlens);CHKERRQ(ierr);
2154   } else {
2155     PetscInt ml,nl;
2156 
2157     M = *newmat;
2158     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2159     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2160     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2161     /*
2162          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2163        rather than the slower MatSetValues().
2164     */
2165     M->was_assembled = PETSC_TRUE;
2166     M->assembled     = PETSC_FALSE;
2167   }
2168   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2169   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2170   aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2171   ii  = aij->i;
2172   jj  = aij->j;
2173   aa  = aij->a;
2174   for (i=0; i<m; i++) {
2175     row   = rstart/bs + i;
2176     nz    = ii[i+1] - ii[i];
2177     cwork = jj;     jj += nz;
2178     vwork = aa;     aa += nz*bs*bs;
2179     ierr = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2180   }
2181 
2182   ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2183   ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2184   *newmat = M;
2185 
2186   /* save submatrix used in processor for next request */
2187   if (call ==  MAT_INITIAL_MATRIX) {
2188     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2189     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2190   }
2191 
2192   PetscFunctionReturn(0);
2193 }
2194 
2195 #undef __FUNCT__
2196 #define __FUNCT__ "MatPermute_MPIBAIJ"
2197 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2198 {
2199   MPI_Comm       comm,pcomm;
2200   PetscInt       first,rlocal_size,clocal_size,nrows;
2201   const PetscInt *rows;
2202   PetscMPIInt    size;
2203   IS             crowp,growp,irowp,lrowp,lcolp;
2204   PetscErrorCode ierr;
2205 
2206   PetscFunctionBegin;
2207   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2208   /* make a collective version of 'rowp' */
2209   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2210   if (pcomm==comm) {
2211     crowp = rowp;
2212   } else {
2213     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2214     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2215     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2216     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2217   }
2218   /* collect the global row permutation and invert it */
2219   ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr);
2220   ierr = ISSetPermutation(growp);CHKERRQ(ierr);
2221   if (pcomm!=comm) {
2222     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2223   }
2224   ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr);
2225   ierr = ISDestroy(&growp);CHKERRQ(ierr);
2226   /* get the local target indices */
2227   ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr);
2228   ierr = MatGetLocalSize(A,&rlocal_size,&clocal_size);CHKERRQ(ierr);
2229   ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr);
2230   ierr = ISCreateGeneral(MPI_COMM_SELF,rlocal_size,rows+first,PETSC_COPY_VALUES,&lrowp);CHKERRQ(ierr);
2231   ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr);
2232   ierr = ISDestroy(&irowp);CHKERRQ(ierr);
2233   /* the column permutation is so much easier;
2234      just make a local version of 'colp' */
2235   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2236   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2237   if (size==1) {
2238     lcolp = colp;
2239   } else {
2240     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2241   }
2242   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2243   /* now we just get the submatrix */
2244   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,lrowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2245   if (size>1) {
2246     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2247   }
2248   /* clean up */
2249   ierr = ISDestroy(&lrowp);CHKERRQ(ierr);
2250   PetscFunctionReturn(0);
2251 }
2252 
2253 #undef __FUNCT__
2254 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2255 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2256 {
2257   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*) mat->data;
2258   Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)baij->B->data;
2259 
2260   PetscFunctionBegin;
2261   if (nghosts) { *nghosts = B->nbs;}
2262   if (ghosts) {*ghosts = baij->garray;}
2263   PetscFunctionReturn(0);
2264 }
2265 
2266 extern PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat);
2267 
2268 #undef __FUNCT__
2269 #define __FUNCT__ "MatFDColoringCreate_MPIBAIJ"
2270 /*
2271     This routine is almost identical to MatFDColoringCreate_MPIAIJ()!
2272 */
2273 PetscErrorCode MatFDColoringCreate_MPIBAIJ(Mat mat,ISColoring iscoloring,MatFDColoring c)
2274 {
2275   Mat_MPIBAIJ            *baij = (Mat_MPIBAIJ*)mat->data;
2276   PetscErrorCode        ierr;
2277   PetscMPIInt           size,*ncolsonproc,*disp,nn;
2278   PetscInt              bs,i,n,nrows,j,k,m,ncols,col;
2279   const PetscInt        *is,*rows = 0,*A_ci,*A_cj,*B_ci,*B_cj;
2280   PetscInt              nis = iscoloring->n,nctot,*cols;
2281   PetscInt              *rowhit,M,cstart,cend,colb;
2282   PetscInt              *columnsforrow,l;
2283   IS                    *isa;
2284   PetscBool              done,flg;
2285   ISLocalToGlobalMapping map = mat->cmap->bmapping;
2286   PetscInt               *ltog = (map ? map->indices : (PetscInt*) PETSC_NULL) ,ctype=c->ctype;
2287 
2288   PetscFunctionBegin;
2289   if (!mat->assembled) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_WRONGSTATE,"Matrix must be assembled first; MatAssemblyBegin/End();");
2290   if (ctype == IS_COLORING_GHOSTED && !map) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_INCOMP,"When using ghosted differencing matrix must have local to global mapping provided with MatSetLocalToGlobalMappingBlock");
2291 
2292   ierr = ISColoringGetIS(iscoloring,PETSC_IGNORE,&isa);CHKERRQ(ierr);
2293   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2294   M                = mat->rmap->n/bs;
2295   cstart           = mat->cmap->rstart/bs;
2296   cend             = mat->cmap->rend/bs;
2297   c->M             = mat->rmap->N/bs;  /* set the global rows and columns and local rows */
2298   c->N             = mat->cmap->N/bs;
2299   c->m             = mat->rmap->n/bs;
2300   c->rstart        = mat->rmap->rstart/bs;
2301 
2302   c->ncolors       = nis;
2303   ierr             = PetscMalloc(nis*sizeof(PetscInt),&c->ncolumns);CHKERRQ(ierr);
2304   ierr             = PetscMalloc(nis*sizeof(PetscInt*),&c->columns);CHKERRQ(ierr);
2305   ierr             = PetscMalloc(nis*sizeof(PetscInt),&c->nrows);CHKERRQ(ierr);
2306   ierr             = PetscMalloc(nis*sizeof(PetscInt*),&c->rows);CHKERRQ(ierr);
2307   ierr             = PetscMalloc(nis*sizeof(PetscInt*),&c->columnsforrow);CHKERRQ(ierr);
2308   ierr = PetscLogObjectMemory(c,5*nis*sizeof(PetscInt));CHKERRQ(ierr);
2309 
2310   /* Allow access to data structures of local part of matrix */
2311   if (!baij->colmap) {
2312     ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
2313   }
2314   ierr = MatGetColumnIJ(baij->A,0,PETSC_FALSE,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);CHKERRQ(ierr);
2315   ierr = MatGetColumnIJ(baij->B,0,PETSC_FALSE,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);CHKERRQ(ierr);
2316 
2317   ierr = PetscMalloc((M+1)*sizeof(PetscInt),&rowhit);CHKERRQ(ierr);
2318   ierr = PetscMalloc((M+1)*sizeof(PetscInt),&columnsforrow);CHKERRQ(ierr);
2319 
2320   for (i=0; i<nis; i++) {
2321     ierr = ISGetLocalSize(isa[i],&n);CHKERRQ(ierr);
2322     ierr = ISGetIndices(isa[i],&is);CHKERRQ(ierr);
2323     c->ncolumns[i] = n;
2324     if (n) {
2325       ierr = PetscMalloc(n*sizeof(PetscInt),&c->columns[i]);CHKERRQ(ierr);
2326       ierr = PetscLogObjectMemory(c,n*sizeof(PetscInt));CHKERRQ(ierr);
2327       ierr = PetscMemcpy(c->columns[i],is,n*sizeof(PetscInt));CHKERRQ(ierr);
2328     } else {
2329       c->columns[i]  = 0;
2330     }
2331 
2332     if (ctype == IS_COLORING_GLOBAL){
2333       /* Determine the total (parallel) number of columns of this color */
2334       ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);
2335       ierr = PetscMalloc2(size,PetscMPIInt,&ncolsonproc,size,PetscMPIInt,&disp);CHKERRQ(ierr);
2336 
2337       nn   = PetscMPIIntCast(n);
2338       ierr = MPI_Allgather(&nn,1,MPI_INT,ncolsonproc,1,MPI_INT,((PetscObject)mat)->comm);CHKERRQ(ierr);
2339       nctot = 0; for (j=0; j<size; j++) {nctot += ncolsonproc[j];}
2340       if (!nctot) {
2341         ierr = PetscInfo(mat,"Coloring of matrix has some unneeded colors with no corresponding rows\n");CHKERRQ(ierr);
2342       }
2343 
2344       disp[0] = 0;
2345       for (j=1; j<size; j++) {
2346         disp[j] = disp[j-1] + ncolsonproc[j-1];
2347       }
2348 
2349       /* Get complete list of columns for color on each processor */
2350       ierr = PetscMalloc((nctot+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
2351       ierr = MPI_Allgatherv((void*)is,n,MPIU_INT,cols,ncolsonproc,disp,MPIU_INT,((PetscObject)mat)->comm);CHKERRQ(ierr);
2352       ierr = PetscFree2(ncolsonproc,disp);CHKERRQ(ierr);
2353     } else if (ctype == IS_COLORING_GHOSTED){
2354       /* Determine local number of columns of this color on this process, including ghost points */
2355       nctot = n;
2356       ierr = PetscMalloc((nctot+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
2357       ierr = PetscMemcpy(cols,is,n*sizeof(PetscInt));CHKERRQ(ierr);
2358     } else {
2359       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not provided for this MatFDColoring type");
2360     }
2361 
2362     /*
2363        Mark all rows affected by these columns
2364     */
2365     /* Temporary option to allow for debugging/testing */
2366     flg  = PETSC_FALSE;
2367     ierr = PetscOptionsGetBool(PETSC_NULL,"-matfdcoloring_slow",&flg,PETSC_NULL);CHKERRQ(ierr);
2368     if (!flg) {/*-----------------------------------------------------------------------------*/
2369       /* crude, fast version */
2370       ierr = PetscMemzero(rowhit,M*sizeof(PetscInt));CHKERRQ(ierr);
2371       /* loop over columns*/
2372       for (j=0; j<nctot; j++) {
2373         if (ctype == IS_COLORING_GHOSTED) {
2374           col = ltog[cols[j]];
2375         } else {
2376           col  = cols[j];
2377         }
2378         if (col >= cstart && col < cend) {
2379           /* column is in diagonal block of matrix */
2380           rows = A_cj + A_ci[col-cstart];
2381           m    = A_ci[col-cstart+1] - A_ci[col-cstart];
2382         } else {
2383 #if defined (PETSC_USE_CTABLE)
2384           ierr = PetscTableFind(baij->colmap,col+1,&colb);CHKERRQ(ierr);
2385           colb --;
2386 #else
2387           colb = baij->colmap[col] - 1;
2388 #endif
2389           if (colb == -1) {
2390             m = 0;
2391           } else {
2392             colb = colb/bs;
2393             rows = B_cj + B_ci[colb];
2394             m    = B_ci[colb+1] - B_ci[colb];
2395           }
2396         }
2397         /* loop over the rows of this column, marking them in rowhit */
2398         for (k=0; k<m; k++) {
2399           rowhit[*rows++] = col + 1;
2400         }
2401       }
2402 
2403       /* count the number of hits */
2404       nrows = 0;
2405       for (j=0; j<M; j++) {
2406         if (rowhit[j]) nrows++;
2407       }
2408       c->nrows[i]         = nrows;
2409       ierr                = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->rows[i]);CHKERRQ(ierr);
2410       ierr                = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->columnsforrow[i]);CHKERRQ(ierr);
2411       ierr = PetscLogObjectMemory(c,2*(nrows+1)*sizeof(PetscInt));CHKERRQ(ierr);
2412       nrows = 0;
2413       for (j=0; j<M; j++) {
2414         if (rowhit[j]) {
2415           c->rows[i][nrows]           = j;
2416           c->columnsforrow[i][nrows] = rowhit[j] - 1;
2417           nrows++;
2418         }
2419       }
2420     } else {/*-------------------------------------------------------------------------------*/
2421       /* slow version, using rowhit as a linked list */
2422       PetscInt currentcol,fm,mfm;
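      /* rowhit[] doubles as a sorted singly linked list of the rows hit so far:
         rowhit[M] is the sentinel head, rowhit[r] holds the next hit row after r,
         and traversal stops when an entry reaches M again */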
2423       rowhit[M] = M;
2424       nrows     = 0;
2425       /* loop over columns*/
2426       for (j=0; j<nctot; j++) {
2427         if (ctype == IS_COLORING_GHOSTED) {
2428           col = ltog[cols[j]];
2429         } else {
2430           col  = cols[j];
2431         }
2432         if (col >= cstart && col < cend) {
2433           /* column is in diagonal block of matrix */
2434           rows = A_cj + A_ci[col-cstart];
2435           m    = A_ci[col-cstart+1] - A_ci[col-cstart];
2436         } else {
2437 #if defined (PETSC_USE_CTABLE)
2438           ierr = PetscTableFind(baij->colmap,col+1,&colb);CHKERRQ(ierr);
2439           colb --;
2440 #else
2441           colb = baij->colmap[col] - 1;
2442 #endif
2443           if (colb == -1) {
2444             m = 0;
2445           } else {
2446             colb = colb/bs;
2447             rows = B_cj + B_ci[colb];
2448             m    = B_ci[colb+1] - B_ci[colb];
2449           }
2450         }
2451 
2452         /* loop over the rows of this column, marking them in rowhit */
2453         fm    = M; /* fm points to first entry in linked list */
2454         for (k=0; k<m; k++) {
2455           currentcol = *rows++;
2456           /* is it already in the list? */
2457           do {
2458             mfm  = fm;
2459             fm   = rowhit[fm];
2460           } while (fm < currentcol);
2461           /* not in list so add it */
2462           if (fm != currentcol) {
2463             nrows++;
2464             columnsforrow[currentcol] = col;
2465             /* next three lines insert new entry into linked list */
2466             rowhit[mfm]               = currentcol;
2467             rowhit[currentcol]        = fm;
2468             fm                        = currentcol;
2469             /* fm points to present position in list since we know the columns are sorted */
2470           } else {
2471             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Invalid coloring of matrix detected");
2472           }
2473         }
2474       }
2475       c->nrows[i]         = nrows;
2476       ierr = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->rows[i]);CHKERRQ(ierr);
2477       ierr = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->columnsforrow[i]);CHKERRQ(ierr);
2478       ierr = PetscLogObjectMemory(c,(nrows+1)*sizeof(PetscInt));CHKERRQ(ierr);
2479       /* now store the linked list of rows into c->rows[i] */
2480       nrows = 0;
2481       fm    = rowhit[M];
2482       do {
2483         c->rows[i][nrows]            = fm;
2484         c->columnsforrow[i][nrows++] = columnsforrow[fm];
2485         fm                           = rowhit[fm];
2486       } while (fm < M);
2487     } /* ---------------------------------------------------------------------------------------*/
2488     ierr = PetscFree(cols);CHKERRQ(ierr);
2489   }
2490 
2491   /* Optimize by adding the vscale, and scaleforrow[][] fields */
2492   /*
2493        vscale will contain the "diagonal" (on-process) scalings followed by the off-process ones
2494   */
2495   if (ctype == IS_COLORING_GLOBAL) {
2496     PetscInt *garray;
2497     ierr = PetscMalloc(baij->B->cmap->n*sizeof(PetscInt),&garray);CHKERRQ(ierr);
2498     for (i=0; i<baij->B->cmap->n/bs; i++) {
2499       for (j=0; j<bs; j++) {
2500         garray[i*bs+j] = bs*baij->garray[i]+j;
2501       }
2502     }
2503     ierr = VecCreateGhost(((PetscObject)mat)->comm,baij->A->rmap->n,PETSC_DETERMINE,baij->B->cmap->n,garray,&c->vscale);CHKERRQ(ierr);
2504     ierr = PetscFree(garray);CHKERRQ(ierr);
2505     CHKMEMQ;
2506     ierr = PetscMalloc(c->ncolors*sizeof(PetscInt*),&c->vscaleforrow);CHKERRQ(ierr);
2507     for (k=0; k<c->ncolors; k++) {
2508       ierr = PetscMalloc((c->nrows[k]+1)*sizeof(PetscInt),&c->vscaleforrow[k]);CHKERRQ(ierr);
2509       for (l=0; l<c->nrows[k]; l++) {
2510         col = c->columnsforrow[k][l];
2511         if (col >= cstart && col < cend) {
2512           /* column is in diagonal block of matrix */
2513           colb = col - cstart;
2514         } else {
2515           /* column  is in "off-processor" part */
2516 #if defined (PETSC_USE_CTABLE)
2517           ierr = PetscTableFind(baij->colmap,col+1,&colb);CHKERRQ(ierr);
2518           colb --;
2519 #else
2520           colb = baij->colmap[col] - 1;
2521 #endif
2522           colb = colb/bs;
2523           colb += cend - cstart;
2524         }
2525         c->vscaleforrow[k][l] = colb;
2526       }
2527     }
2528   } else if (ctype == IS_COLORING_GHOSTED) {
2529     /* Get gtol mapping */
2530     PetscInt N = mat->cmap->N, *gtol;
2531     ierr = PetscMalloc((N+1)*sizeof(PetscInt),&gtol);CHKERRQ(ierr);
2532     for (i=0; i<N; i++) gtol[i] = -1;
2533     for (i=0; i<map->n; i++) gtol[ltog[i]] = i;
2534 
2535     c->vscale = 0; /* will be created in MatFDColoringApply() */
2536     ierr = PetscMalloc(c->ncolors*sizeof(PetscInt*),&c->vscaleforrow);CHKERRQ(ierr);
2537     for (k=0; k<c->ncolors; k++) {
2538       ierr = PetscMalloc((c->nrows[k]+1)*sizeof(PetscInt),&c->vscaleforrow[k]);CHKERRQ(ierr);
2539       for (l=0; l<c->nrows[k]; l++) {
2540         col = c->columnsforrow[k][l];      /* global column index */
2541         c->vscaleforrow[k][l] = gtol[col]; /* local column index */
2542       }
2543     }
2544     ierr = PetscFree(gtol);CHKERRQ(ierr);
2545   }
2546   ierr = ISColoringRestoreIS(iscoloring,&isa);CHKERRQ(ierr);
2547 
2548   ierr = PetscFree(rowhit);CHKERRQ(ierr);
2549   ierr = PetscFree(columnsforrow);CHKERRQ(ierr);
2550   ierr = MatRestoreColumnIJ(baij->A,0,PETSC_FALSE,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);CHKERRQ(ierr);
2551   ierr = MatRestoreColumnIJ(baij->B,0,PETSC_FALSE,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);CHKERRQ(ierr);
2552   CHKMEMQ;
2553   PetscFunctionReturn(0);
2554 }
2555 
2556 #undef __FUNCT__
2557 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2558 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2559 {
2560   Mat            B;
2561   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ *)A->data;
2562   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2563   Mat_SeqAIJ     *b;
2564   PetscErrorCode ierr;
2565   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2566   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2567   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2568 
2569   PetscFunctionBegin;
2570   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
2571   ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);
2572 
2573   /* ----------------------------------------------------------------
2574      Tell every processor the number of nonzeros per row
2575   */
2576   ierr = PetscMalloc((A->rmap->N/bs)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
2577   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2578     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2579   }
2580   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2581   ierr = PetscMalloc(2*size*sizeof(PetscMPIInt),&recvcounts);CHKERRQ(ierr);
2582   displs     = recvcounts + size;
2583   for (i=0; i<size; i++) {
2584     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2585     displs[i]     = A->rmap->range[i]/bs;
2586   }
2587 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2588   ierr  = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,((PetscObject)A)->comm);CHKERRQ(ierr);
2589 #else
2590   ierr  = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,((PetscObject)A)->comm);CHKERRQ(ierr);
2591 #endif
2592   /* ---------------------------------------------------------------
2593      Create the sequential matrix of the same type as the local block diagonal
2594   */
2595   ierr  = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2596   ierr  = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2597   ierr  = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2598   ierr  = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2599   b = (Mat_SeqAIJ *)B->data;
2600 
2601   /*--------------------------------------------------------------------
2602     Copy my part of matrix column indices over
2603   */
2604   sendcount  = ad->nz + bd->nz;
2605   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2606   a_jsendbuf = ad->j;
2607   b_jsendbuf = bd->j;
2608   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2609   cnt        = 0;
2610   for (i=0; i<n; i++) {
2611 
2612     /* put in lower diagonal portion */
2613     m = bd->i[i+1] - bd->i[i];
2614     while (m > 0) {
2615       /* is it above diagonal (in bd (compressed) numbering) */
2616       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2617       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2618       m--;
2619     }
2620 
2621     /* put in diagonal portion */
2622     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2623       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2624     }
2625 
2626     /* put in upper diagonal portion */
2627     while (m-- > 0) {
2628       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2629     }
2630   }
2631   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2632 
2633   /*--------------------------------------------------------------------
2634     Gather all column indices to all processors
2635   */
2636   for (i=0; i<size; i++) {
2637     recvcounts[i] = 0;
2638     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2639       recvcounts[i] += lens[j];
2640     }
2641   }
2642   displs[0]  = 0;
2643   for (i=1; i<size; i++) {
2644     displs[i] = displs[i-1] + recvcounts[i-1];
2645   }
2646 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2647   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,((PetscObject)A)->comm);CHKERRQ(ierr);
2648 #else
2649   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,((PetscObject)A)->comm);CHKERRQ(ierr);
2650 #endif
2651   /*--------------------------------------------------------------------
2652     Assemble the matrix into usable form (note numerical values not yet set)
2653   */
2654   /* set the b->ilen (length of each row) values */
2655   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2656   /* set the b->i indices */
2657   b->i[0] = 0;
2658   for (i=1; i<=A->rmap->N/bs; i++) {
2659     b->i[i] = b->i[i-1] + lens[i-1];
2660   }
2661   ierr = PetscFree(lens);CHKERRQ(ierr);
2662   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2663   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2664   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2665 
2666   if (A->symmetric){
2667     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2668   } else if (A->hermitian) {
2669     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2670   } else if (A->structurally_symmetric) {
2671     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2672   }
2673   *newmat = B;
2674   PetscFunctionReturn(0);
2675 }
2676 
2677 #undef __FUNCT__
2678 #define __FUNCT__ "MatSOR_MPIBAIJ"
2679 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2680 {
2681   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2682   PetscErrorCode ierr;
2683   Vec            bb1 = 0;
2684 
2685   PetscFunctionBegin;
2686   if (flag == SOR_APPLY_UPPER) {
2687     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2688     PetscFunctionReturn(0);
2689   }
2690 
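  /* a work vector bb1 for the modified right-hand side is needed unless a single local sweep starting from a zero initial guess was requested */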
2691   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) {
2692     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2693   }
2694 
2695   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){
2696     if (flag & SOR_ZERO_INITIAL_GUESS) {
2697       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2698       its--;
2699     }
2700 
2701     while (its--) {
2702       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2703       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2704 
2705       /* update rhs: bb1 = bb - B*x */
2706       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2707       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2708 
2709       /* local sweep */
2710       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2711     }
2712   } else if (flag & SOR_LOCAL_FORWARD_SWEEP){
2713     if (flag & SOR_ZERO_INITIAL_GUESS) {
2714       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2715       its--;
2716     }
2717     while (its--) {
2718       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2719       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2720 
2721       /* update rhs: bb1 = bb - B*x */
2722       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2723       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2724 
2725       /* local sweep */
2726       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2727     }
2728   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){
2729     if (flag & SOR_ZERO_INITIAL_GUESS) {
2730       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2731       its--;
2732     }
2733     while (its--) {
2734       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2735       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2736 
2737       /* update rhs: bb1 = bb - B*x */
2738       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2739       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2740 
2741       /* local sweep */
2742       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2743     }
2744   } else SETERRQ(((PetscObject)matin)->comm,PETSC_ERR_SUP,"Parallel SOR not supported for the requested flag combination");
2745 
2746   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2747   PetscFunctionReturn(0);
2748 }
2749 
2750 extern PetscErrorCode  MatFDColoringApply_BAIJ(Mat,MatFDColoring,Vec,MatStructure*,void*);
2751 
2752 #undef __FUNCT__
2753 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2754 PetscErrorCode  MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2755 {
2756   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2757   PetscErrorCode ierr;
2758 
2759   PetscFunctionBegin;
2760   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2761   PetscFunctionReturn(0);
2762 }
2763 
2764 
2765 /* -------------------------------------------------------------------*/
2766 static struct _MatOps MatOps_Values = {
2767        MatSetValues_MPIBAIJ,
2768        MatGetRow_MPIBAIJ,
2769        MatRestoreRow_MPIBAIJ,
2770        MatMult_MPIBAIJ,
2771 /* 4*/ MatMultAdd_MPIBAIJ,
2772        MatMultTranspose_MPIBAIJ,
2773        MatMultTransposeAdd_MPIBAIJ,
2774        0,
2775        0,
2776        0,
2777 /*10*/ 0,
2778        0,
2779        0,
2780        MatSOR_MPIBAIJ,
2781        MatTranspose_MPIBAIJ,
2782 /*15*/ MatGetInfo_MPIBAIJ,
2783        MatEqual_MPIBAIJ,
2784        MatGetDiagonal_MPIBAIJ,
2785        MatDiagonalScale_MPIBAIJ,
2786        MatNorm_MPIBAIJ,
2787 /*20*/ MatAssemblyBegin_MPIBAIJ,
2788        MatAssemblyEnd_MPIBAIJ,
2789        MatSetOption_MPIBAIJ,
2790        MatZeroEntries_MPIBAIJ,
2791 /*24*/ MatZeroRows_MPIBAIJ,
2792        0,
2793        0,
2794        0,
2795        0,
2796 /*29*/ MatSetUp_MPIBAIJ,
2797        0,
2798        0,
2799        0,
2800        0,
2801 /*34*/ MatDuplicate_MPIBAIJ,
2802        0,
2803        0,
2804        0,
2805        0,
2806 /*39*/ MatAXPY_MPIBAIJ,
2807        MatGetSubMatrices_MPIBAIJ,
2808        MatIncreaseOverlap_MPIBAIJ,
2809        MatGetValues_MPIBAIJ,
2810        MatCopy_MPIBAIJ,
2811 /*44*/ 0,
2812        MatScale_MPIBAIJ,
2813        0,
2814        0,
2815        0,
2816 /*49*/ 0,
2817        0,
2818        0,
2819        0,
2820        0,
2821 /*54*/ MatFDColoringCreate_MPIBAIJ,
2822        0,
2823        MatSetUnfactored_MPIBAIJ,
2824        MatPermute_MPIBAIJ,
2825        MatSetValuesBlocked_MPIBAIJ,
2826 /*59*/ MatGetSubMatrix_MPIBAIJ,
2827        MatDestroy_MPIBAIJ,
2828        MatView_MPIBAIJ,
2829        0,
2830        0,
2831 /*64*/ 0,
2832        0,
2833        0,
2834        0,
2835        0,
2836 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2837        0,
2838        0,
2839        0,
2840        0,
2841 /*74*/ 0,
2842        MatFDColoringApply_BAIJ,
2843        0,
2844        0,
2845        0,
2846 /*79*/ 0,
2847        0,
2848        0,
2849        0,
2850        MatLoad_MPIBAIJ,
2851 /*84*/ 0,
2852        0,
2853        0,
2854        0,
2855        0,
2856 /*89*/ 0,
2857        0,
2858        0,
2859        0,
2860        0,
2861 /*94*/ 0,
2862        0,
2863        0,
2864        0,
2865        0,
2866 /*99*/ 0,
2867        0,
2868        0,
2869        0,
2870        0,
2871 /*104*/0,
2872        MatRealPart_MPIBAIJ,
2873        MatImaginaryPart_MPIBAIJ,
2874        0,
2875        0,
2876 /*109*/0,
2877        0,
2878        0,
2879        0,
2880        0,
2881 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2882        0,
2883        MatGetGhosts_MPIBAIJ,
2884        0,
2885        0,
2886 /*119*/0,
2887        0,
2888        0,
2889        0,
2890        0,
2891 /*124*/0,
2892        0,
2893        MatInvertBlockDiagonal_MPIBAIJ
2894 };
2895 
2896 EXTERN_C_BEGIN
2897 #undef __FUNCT__
2898 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2899 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2900 {
2901   PetscFunctionBegin;
2902   *a = ((Mat_MPIBAIJ *)A->data)->A;
2903   PetscFunctionReturn(0);
2904 }
2905 EXTERN_C_END
2906 
2907 EXTERN_C_BEGIN
2908 extern PetscErrorCode  MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2909 EXTERN_C_END
2910 
2911 EXTERN_C_BEGIN
2912 #undef __FUNCT__
2913 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2914 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2915 {
2916   PetscInt       m,rstart,cstart,cend;
2917   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2918   const PetscInt *JJ=0;
2919   PetscScalar    *values=0;
2920   PetscErrorCode ierr;
2921 
2922   PetscFunctionBegin;
2923   ierr = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2924   ierr = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2925   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2926   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2927   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2928   m      = B->rmap->n/bs;
2929   rstart = B->rmap->rstart/bs;
2930   cstart = B->cmap->rstart/bs;
2931   cend   = B->cmap->rend/bs;
2932 
2933   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2934   ierr  = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr);
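  /* for each block row, count how many blocks fall in the diagonal part (global block columns in [cstart,cend));
     the remainder belongs to the off-diagonal part; this relies on the column indices being sorted within each row */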
2935   for (i=0; i<m; i++) {
2936     nz = ii[i+1] - ii[i];
2937     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2938     nz_max = PetscMax(nz_max,nz);
2939     JJ  = jj + ii[i];
2940     for (j=0; j<nz; j++) {
2941       if (*JJ >= cstart) break;
2942       JJ++;
2943     }
2944     d = 0;
2945     for (; j<nz; j++) {
2946       if (*JJ++ >= cend) break;
2947       d++;
2948     }
2949     d_nnz[i] = d;
2950     o_nnz[i] = nz - d;
2951   }
2952   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2953   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2954 
2955   values = (PetscScalar*)V;
2956   if (!values) {
2957     ierr = PetscMalloc(bs*bs*nz_max*sizeof(PetscScalar),&values);CHKERRQ(ierr);
2958     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2959   }
2960   for (i=0; i<m; i++) {
2961     PetscInt          row    = i + rstart;
2962     PetscInt          ncols  = ii[i+1] - ii[i];
2963     const PetscInt    *icols = jj + ii[i];
2964     const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2965     ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2966   }
2967 
2968   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2969   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2970   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2971   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2972   PetscFunctionReturn(0);
2973 }
2974 EXTERN_C_END
2975 
2976 #undef __FUNCT__
2977 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2978 /*@C
2979    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2980    (block compressed row).
2981 
2982    Collective on MPI_Comm
2983 
2984    Input Parameters:
2985 +  A - the matrix
2986 .  bs - the block size
2987 .  i - the indices into j for the start of each local row (starts with zero)
2988 .  j - the column indices for each local row (starts with zero); these must be sorted within each row
2989 -  v - optional values in the matrix
2990 
2991    Level: developer
2992 
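   Example usage (a minimal sketch; assumes B was created with MatCreate(), sized with
   MatSetSizes() and given a BAIJ type, that this process owns two block rows, and that
   the index arrays below are illustrative only; error checking is omitted):

.vb
   PetscInt bs   = 2;
   PetscInt ii[] = {0,2,3};     /* block row 0 has 2 blocks, block row 1 has 1 block */
   PetscInt jj[] = {0,1,1};     /* global block column indices, sorted within each row */

   MatMPIBAIJSetPreallocationCSR(B,bs,ii,jj,PETSC_NULL);
.ve

   Passing PETSC_NULL for v inserts zeros at the given locations (establishing the nonzero
   pattern); otherwise v must hold bs*bs values for each block listed in j.
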
2993 .keywords: matrix, aij, compressed row, sparse, parallel
2994 
2995 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ
2996 @*/
2997 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2998 {
2999   PetscErrorCode ierr;
3000 
3001   PetscFunctionBegin;
3002   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3003   PetscValidType(B,1);
3004   PetscValidLogicalCollectiveInt(B,bs,2);
3005   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
3006   PetscFunctionReturn(0);
3007 }
3008 
3009 EXTERN_C_BEGIN
3010 #undef __FUNCT__
3011 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
3012 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,PetscInt *d_nnz,PetscInt o_nz,PetscInt *o_nnz)
3013 {
3014   Mat_MPIBAIJ    *b;
3015   PetscErrorCode ierr;
3016   PetscInt       i;
3017   PetscBool      d_realalloc = PETSC_FALSE,o_realalloc = PETSC_FALSE;
3018 
3019   PetscFunctionBegin;
3020   if (d_nz >= 0 || d_nnz) d_realalloc = PETSC_TRUE;
3021   if (o_nz >= 0 || o_nnz) o_realalloc = PETSC_TRUE;
3022 
3023   if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
3024   if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
3025   if (d_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
3026   if (o_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);
3027 
3028   ierr = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
3029   ierr = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
3030   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
3031   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
3032   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
3033 
3034   if (d_nnz) {
3035     for (i=0; i<B->rmap->n/bs; i++) {
3036       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
3037     }
3038   }
3039   if (o_nnz) {
3040     for (i=0; i<B->rmap->n/bs; i++) {
3041       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
3042     }
3043   }
3044 
3045   b = (Mat_MPIBAIJ*)B->data;
3046   b->bs2 = bs*bs;
3047   b->mbs = B->rmap->n/bs;
3048   b->nbs = B->cmap->n/bs;
3049   b->Mbs = B->rmap->N/bs;
3050   b->Nbs = B->cmap->N/bs;
3051 
3052   for (i=0; i<=b->size; i++) {
3053     b->rangebs[i] = B->rmap->range[i]/bs;
3054   }
3055   b->rstartbs = B->rmap->rstart/bs;
3056   b->rendbs   = B->rmap->rend/bs;
3057   b->cstartbs = B->cmap->rstart/bs;
3058   b->cendbs   = B->cmap->rend/bs;
3059 
3060   if (!B->preallocated) {
3061     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3062     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3063     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3064     ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
3065     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3066     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3067     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3068     ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);
3069     ierr = MatStashCreate_Private(((PetscObject)B)->comm,bs,&B->bstash);CHKERRQ(ierr);
3070   }
3071 
3072   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3073   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3074   /* Do not error if the user did not give real preallocation information. Ugly because this would overwrite a previous user call to MatSetOption(). */
3075   if (!d_realalloc) {ierr = MatSetOption(b->A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);}
3076   if (!o_realalloc) {ierr = MatSetOption(b->B,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);}
3077   B->preallocated = PETSC_TRUE;
3078   PetscFunctionReturn(0);
3079 }
3080 EXTERN_C_END
3081 
3082 EXTERN_C_BEGIN
3083 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3084 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3085 EXTERN_C_END
3086 
3087 
3088 EXTERN_C_BEGIN
3089 #undef __FUNCT__
3090 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3091 PetscErrorCode  MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3092 {
3093   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3094   PetscErrorCode ierr;
3095   Mat_SeqBAIJ    *d = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3096   PetscInt       M = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3097   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3098 
3099   PetscFunctionBegin;
3100   ierr = PetscMalloc((M+1)*sizeof(PetscInt),&ii);CHKERRQ(ierr);
3101   ii[0] = 0;
3102   CHKMEMQ;
3103   for (i=0; i<M; i++) {
3104     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3105     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3106     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3107     /* remove one from the count if this row contains a diagonal entry */
3108     for (j=id[i]; j<id[i+1]; j++) {
3109       if (jd[j] == i) {ii[i+1]--;break;}
3110     }
3111   CHKMEMQ;
3112   }
3113   ierr = PetscMalloc(ii[M]*sizeof(PetscInt),&jj);CHKERRQ(ierr);
3114   cnt = 0;
3115   for (i=0; i<M; i++) {
3116     for (j=io[i]; j<io[i+1]; j++) {
3117       if (garray[jo[j]] > rstart) break;
3118       jj[cnt++] = garray[jo[j]];
3119   CHKMEMQ;
3120     }
3121     for (k=id[i]; k<id[i+1]; k++) {
3122       if (jd[k] != i) {
3123         jj[cnt++] = rstart + jd[k];
3124   CHKMEMQ;
3125       }
3126     }
3127     for (;j<io[i+1]; j++) {
3128       jj[cnt++] = garray[jo[j]];
3129   CHKMEMQ;
3130     }
3131   }
3132   ierr = MatCreateMPIAdj(((PetscObject)B)->comm,M,B->cmap->N/B->rmap->bs,ii,jj,PETSC_NULL,adj);CHKERRQ(ierr);
3133   PetscFunctionReturn(0);
3134 }
3135 EXTERN_C_END
3136 
3137 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3138 EXTERN_C_BEGIN
3139 PetscErrorCode  MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3140 EXTERN_C_END
3141 
3142 EXTERN_C_BEGIN
3143 #undef __FUNCT__
3144 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3145 PetscErrorCode  MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3146 {
3147   PetscErrorCode ierr;
3148   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3149   Mat            B;
3150   Mat_MPIAIJ     *b;
3151 
3152   PetscFunctionBegin;
3153   if (!A->assembled) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Matrix must be assembled");
3154 
3155   ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
3156   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3157   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3158   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
3159   ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
3160   b = (Mat_MPIAIJ*) B->data;
3161 
3162   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3163   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3164   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3165   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3166   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3167   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3168   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3169   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3170   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3171   if (reuse == MAT_REUSE_MATRIX) {
3172     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
3173   } else {
3174    *newmat = B;
3175   }
3176   PetscFunctionReturn(0);
3177 }
3178 EXTERN_C_END
3179 
3180 EXTERN_C_BEGIN
3181 #if defined(PETSC_HAVE_MUMPS)
3182 extern PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
3183 #endif
3184 EXTERN_C_END
3185 
3186 /*MC
3187    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3188 
3189    Options Database Keys:
3190 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3191 . -mat_block_size <bs> - set the blocksize used to store the matrix
3192 - -mat_use_hash_table <fact> - use a hash table to save memory when constructing the matrix
3193 
3194   Level: beginner
3195 
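   Example usage (a sketch; error checking is omitted and comm, m, n, M, N are assumed to
   have been chosen elsewhere):

.vb
   MatCreate(comm,&A);
   MatSetSizes(A,m,n,M,N);
   MatSetFromOptions(A);      /* e.g. run with -mat_type mpibaij */
   MatMPIBAIJSetPreallocation(A,3,5,PETSC_NULL,2,PETSC_NULL);
.ve
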
3196 .seealso: MatCreateBAIJ()
3197 M*/
3198 
3199 EXTERN_C_BEGIN
3200 extern PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3201 EXTERN_C_END
3202 
3203 EXTERN_C_BEGIN
3204 #undef __FUNCT__
3205 #define __FUNCT__ "MatCreate_MPIBAIJ"
3206 PetscErrorCode  MatCreate_MPIBAIJ(Mat B)
3207 {
3208   Mat_MPIBAIJ    *b;
3209   PetscErrorCode ierr;
3210   PetscBool      flg;
3211 
3212   PetscFunctionBegin;
3213   ierr = PetscNewLog(B,Mat_MPIBAIJ,&b);CHKERRQ(ierr);
3214   B->data = (void*)b;
3215 
3216   ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3217   B->assembled  = PETSC_FALSE;
3218 
3219   B->insertmode = NOT_SET_VALUES;
3220   ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);
3221   ierr = MPI_Comm_size(((PetscObject)B)->comm,&b->size);CHKERRQ(ierr);
3222 
3223   /* build local table of row and column ownerships */
3224   ierr = PetscMalloc((b->size+1)*sizeof(PetscInt),&b->rangebs);CHKERRQ(ierr);
3225 
3226   /* build cache for off array entries formed */
3227   ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
3228   b->donotstash  = PETSC_FALSE;
3229   b->colmap      = PETSC_NULL;
3230   b->garray      = PETSC_NULL;
3231   b->roworiented = PETSC_TRUE;
3232 
3233   /* stuff used in block assembly */
3234   b->barray       = 0;
3235 
3236   /* stuff used for matrix vector multiply */
3237   b->lvec         = 0;
3238   b->Mvctx        = 0;
3239 
3240   /* stuff for MatGetRow() */
3241   b->rowindices   = 0;
3242   b->rowvalues    = 0;
3243   b->getrowactive = PETSC_FALSE;
3244 
3245   /* hash table stuff */
3246   b->ht           = 0;
3247   b->hd           = 0;
3248   b->ht_size      = 0;
3249   b->ht_flag      = PETSC_FALSE;
3250   b->ht_fact      = 0;
3251   b->ht_total_ct  = 0;
3252   b->ht_insert_ct = 0;
3253 
3254   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3255   b->ijonly       = PETSC_FALSE;
3256 
3257   ierr = PetscOptionsBegin(((PetscObject)B)->comm,PETSC_NULL,"Options for loading MPIBAIJ matrix 1","Mat");CHKERRQ(ierr);
3258     ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",PETSC_FALSE,&flg,PETSC_NULL);CHKERRQ(ierr);
3259     if (flg) {
3260       PetscReal fact = 1.39;
3261       ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3262       ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,PETSC_NULL);CHKERRQ(ierr);
3263       if (fact <= 1.0) fact = 1.39;
3264       ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3265       ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3266     }
3267   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3268 
3269 #if defined(PETSC_HAVE_MUMPS)
3270   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mumps_C", "MatGetFactor_baij_mumps",MatGetFactor_baij_mumps);CHKERRQ(ierr);
3271 #endif
3272   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",
3273                                      "MatConvert_MPIBAIJ_MPIAdj",
3274                                       MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3275   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",
3276                                      "MatConvert_MPIBAIJ_MPIAIJ",
3277                                       MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3278   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",
3279                                      "MatConvert_MPIBAIJ_MPISBAIJ",
3280                                       MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3281   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
3282                                      "MatStoreValues_MPIBAIJ",
3283                                      MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3284   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
3285                                      "MatRetrieveValues_MPIBAIJ",
3286                                      MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3287   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
3288                                      "MatGetDiagonalBlock_MPIBAIJ",
3289                                      MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
3290   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIBAIJSetPreallocation_C",
3291                                      "MatMPIBAIJSetPreallocation_MPIBAIJ",
3292                                      MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3293   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",
3294 				     "MatMPIBAIJSetPreallocationCSR_MPIBAIJ",
3295 				     MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3296   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
3297                                      "MatDiagonalScaleLocal_MPIBAIJ",
3298                                      MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3299   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSetHashTableFactor_C",
3300                                      "MatSetHashTableFactor_MPIBAIJ",
3301                                      MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3302   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpibaij_mpibstrm_C",
3303                                      "MatConvert_MPIBAIJ_MPIBSTRM",
3304                                       MatConvert_MPIBAIJ_MPIBSTRM);CHKERRQ(ierr);
3305   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3306   PetscFunctionReturn(0);
3307 }
3308 EXTERN_C_END
3309 
3310 /*MC
3311    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3312 
3313    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3314    and MATMPIBAIJ otherwise.
3315 
3316    Options Database Keys:
3317 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3318 
3319   Level: beginner
3320 
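   Example usage (a sketch; error checking is omitted): since the type resolves to either
   MATSEQBAIJ or MATMPIBAIJ at run time, one can call both preallocation routines and the
   one matching the actual type takes effect:

.vb
   MatCreate(comm,&A);
   MatSetSizes(A,m,n,M,N);
   MatSetType(A,MATBAIJ);
   MatSeqBAIJSetPreallocation(A,bs,d_nz,PETSC_NULL);                  /* used on one process       */
   MatMPIBAIJSetPreallocation(A,bs,d_nz,PETSC_NULL,o_nz,PETSC_NULL);  /* used on several processes */
.ve
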
3321 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3322 M*/
3323 
3324 #undef __FUNCT__
3325 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3326 /*@C
3327    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3328    (block compressed row).  For good matrix assembly performance
3329    the user should preallocate the matrix storage by setting the parameters
3330    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3331    performance can be increased by more than a factor of 50.
3332 
3333    Collective on Mat
3334 
3335    Input Parameters:
3336 +  A - the matrix
3337 .  bs   - size of block
3338 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3339            submatrix  (same for all local rows)
3340 .  d_nnz - array containing the number of block nonzeros in the various block rows
3341            in the diagonal portion of the local submatrix (possibly different for each block
3342            row) or PETSC_NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3343            set it even if it is zero.
3344 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3345            submatrix (same for all local rows).
3346 -  o_nnz - array containing the number of block nonzeros in the various block rows of the
3347            off-diagonal portion of the local submatrix (possibly different for
3348            each block row) or PETSC_NULL.
3349 
3350    If the *_nnz parameter is given then the *_nz parameter is ignored
3351 
3352    Options Database Keys:
3353 +   -mat_block_size - size of the blocks to use
3354 -   -mat_use_hash_table <fact> - use a hash table to save memory when constructing the matrix
3355 
3356    Notes:
3357    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3358    then it must be used on all processors that share the object for that argument.
3359 
3360    Storage Information:
3361    For a square global matrix we define each processor's diagonal portion
3362    to be its local rows and the corresponding columns (a square submatrix);
3363    each processor's off-diagonal portion encompasses the remainder of the
3364    local matrix (a rectangular submatrix).
3365 
3366    The user can specify preallocated storage for the diagonal part of
3367    the local submatrix with either d_nz or d_nnz (not both).  Set
3368    d_nz=PETSC_DEFAULT and d_nnz=PETSC_NULL for PETSc to control dynamic
3369    memory allocation.  Likewise, specify preallocated storage for the
3370    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3371 
3372    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3373    the figure below we depict these three local rows and all columns (0-11).
3374 
3375 .vb
3376            0 1 2 3 4 5 6 7 8 9 10 11
3377           -------------------
3378    row 3  |  o o o d d d o o o o o o
3379    row 4  |  o o o d d d o o o o o o
3380    row 5  |  o o o d d d o o o o o o
3381           -------------------
3382 .ve
3383 
3384    Thus, any entries in the d locations are stored in the d (diagonal)
3385    submatrix, and any entries in the o locations are stored in the
3386    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3387    stored simply in the MATSEQBAIJ format for compressed row storage.
3388 
3389    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3390    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3391    In general, for PDE problems in which most nonzeros are near the diagonal,
3392    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3393    or you will get TERRIBLE performance; see the users' manual chapter on
3394    matrices.
3395 
3396    You can call MatGetInfo() to get information on how effective the preallocation was;
3397    for example the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
3398    You can also run with the option -info and look for messages with the string
3399    malloc in them to see if additional memory allocation was needed.
3400 
3401    Level: intermediate
3402 
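   Example usage (a minimal sketch; error checking is omitted and the block size and
   preallocation numbers are placeholders):

.vb
   Mat A;
   MatCreate(PETSC_COMM_WORLD,&A);
   MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,N);
   MatSetType(A,MATMPIBAIJ);
   MatMPIBAIJSetPreallocation(A,2,5,PETSC_NULL,2,PETSC_NULL);
   /* ... MatSetValuesBlocked(), MatAssemblyBegin(), MatAssemblyEnd() ... */
.ve
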
3403 .keywords: matrix, block, aij, compressed row, sparse, parallel
3404 
3405 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3406 @*/
3407 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3408 {
3409   PetscErrorCode ierr;
3410 
3411   PetscFunctionBegin;
3412   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3413   PetscValidType(B,1);
3414   PetscValidLogicalCollectiveInt(B,bs,2);
3415   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3416   PetscFunctionReturn(0);
3417 }
3418 
3419 #undef __FUNCT__
3420 #define __FUNCT__ "MatCreateBAIJ"
3421 /*@C
3422    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3423    (block compressed row).  For good matrix assembly performance
3424    the user should preallocate the matrix storage by setting the parameters
3425    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3426    performance can be increased by more than a factor of 50.
3427 
3428    Collective on MPI_Comm
3429 
3430    Input Parameters:
3431 +  comm - MPI communicator
3432 .  bs   - size of block
3433 .  m - number of local rows (or PETSC_DECIDE to have it calculated if M is given)
3434            This value should be the same as the local size used in creating the
3435            y vector for the matrix-vector product y = Ax.
3436 .  n - number of local columns (or PETSC_DECIDE to have it calculated if N is given)
3437            This value should be the same as the local size used in creating the
3438            x vector for the matrix-vector product y = Ax.
3439 .  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
3440 .  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
3441 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3442            submatrix  (same for all local rows)
3443 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3444            in the diagonal portion of the local submatrix (possibly different for each block
3445            row) or PETSC_NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3446            and set it even if it is zero.
3447 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3448            submatrix (same for all local rows).
3449 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3450            off-diagonal portion of the local submatrix (possibly different for
3451            each block row) or PETSC_NULL.
3452 
3453    Output Parameter:
3454 .  A - the matrix
3455 
3456    Options Database Keys:
3457 +   -mat_block_size - size of the blocks to use
3458 -   -mat_use_hash_table <fact> - use a hash table to save memory when constructing the matrix
3459 
3460    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3461    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3462    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3463 
3464    Notes:
3465    If the *_nnz parameter is given then the *_nz parameter is ignored
3466 
3467    A nonzero block is any block that has 1 or more nonzeros in it
3468 
3469    The user MUST specify either the local or global matrix dimensions
3470    (possibly both).
3471 
3472    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3473    then it must be used on all processors that share the object for that argument.
3474 
3475    Storage Information:
3476    For a square global matrix we define each processor's diagonal portion
3477    to be its local rows and the corresponding columns (a square submatrix);
3478    each processor's off-diagonal portion encompasses the remainder of the
3479    local matrix (a rectangular submatrix).
3480 
3481    The user can specify preallocated storage for the diagonal part of
3482    the local submatrix with either d_nz or d_nnz (not both).  Set
3483    d_nz=PETSC_DEFAULT and d_nnz=PETSC_NULL for PETSc to control dynamic
3484    memory allocation.  Likewise, specify preallocated storage for the
3485    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3486 
3487    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3488    the figure below we depict these three local rows and all columns (0-11).
3489 
3490 .vb
3491            0 1 2 3 4 5 6 7 8 9 10 11
3492           -------------------
3493    row 3  |  o o o d d d o o o o o o
3494    row 4  |  o o o d d d o o o o o o
3495    row 5  |  o o o d d d o o o o o o
3496           -------------------
3497 .ve
3498 
3499    Thus, any entries in the d locations are stored in the d (diagonal)
3500    submatrix, and any entries in the o locations are stored in the
3501    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3502    stored simply in the MATSEQBAIJ format for compressed row storage.
3503 
3504    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3505    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3506    In general, for PDE problems in which most nonzeros are near the diagonal,
3507    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3508    or you will get TERRIBLE performance; see the users' manual chapter on
3509    matrices.
3510 
3511    Level: intermediate
3512 
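   Example usage (a minimal sketch; error checking is omitted and the block size and
   preallocation numbers are placeholders):

.vb
   Mat A;
   MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,M,N,5,PETSC_NULL,2,PETSC_NULL,&A);
   /* set entries with MatSetValuesBlocked(), then assemble */
   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve
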
3513 .keywords: matrix, block, aij, compressed row, sparse, parallel
3514 
3515 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3516 @*/
3517 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3518 {
3519   PetscErrorCode ierr;
3520   PetscMPIInt    size;
3521 
3522   PetscFunctionBegin;
3523   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3524   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3525   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3526   if (size > 1) {
3527     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3528     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3529   } else {
3530     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3531     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3532   }
3533   PetscFunctionReturn(0);
3534 }
3535 
3536 #undef __FUNCT__
3537 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3538 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3539 {
3540   Mat            mat;
3541   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3542   PetscErrorCode ierr;
3543   PetscInt       len=0;
3544 
3545   PetscFunctionBegin;
3546   *newmat       = 0;
3547   ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
3548   ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3549   ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3550   ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3551 
3552   mat->factortype   = matin->factortype;
3553   mat->preallocated = PETSC_TRUE;
3554   mat->assembled    = PETSC_TRUE;
3555   mat->insertmode   = NOT_SET_VALUES;
3556 
3557   a      = (Mat_MPIBAIJ*)mat->data;
3558   mat->rmap->bs  = matin->rmap->bs;
3559   a->bs2   = oldmat->bs2;
3560   a->mbs   = oldmat->mbs;
3561   a->nbs   = oldmat->nbs;
3562   a->Mbs   = oldmat->Mbs;
3563   a->Nbs   = oldmat->Nbs;
3564 
3565   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3566   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3567 
3568   a->size         = oldmat->size;
3569   a->rank         = oldmat->rank;
3570   a->donotstash   = oldmat->donotstash;
3571   a->roworiented  = oldmat->roworiented;
3572   a->rowindices   = 0;
3573   a->rowvalues    = 0;
3574   a->getrowactive = PETSC_FALSE;
3575   a->barray       = 0;
3576   a->rstartbs     = oldmat->rstartbs;
3577   a->rendbs       = oldmat->rendbs;
3578   a->cstartbs     = oldmat->cstartbs;
3579   a->cendbs       = oldmat->cendbs;
3580 
3581   /* hash table stuff */
3582   a->ht           = 0;
3583   a->hd           = 0;
3584   a->ht_size      = 0;
3585   a->ht_flag      = oldmat->ht_flag;
3586   a->ht_fact      = oldmat->ht_fact;
3587   a->ht_total_ct  = 0;
3588   a->ht_insert_ct = 0;
3589 
3590   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3591   if (oldmat->colmap) {
3592 #if defined (PETSC_USE_CTABLE)
3593   ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3594 #else
3595   ierr = PetscMalloc((a->Nbs)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
3596   ierr = PetscLogObjectMemory(mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3597   ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3598 #endif
3599   } else a->colmap = 0;
3600 
3601   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3602     ierr = PetscMalloc(len*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
3603     ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3604     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3605   } else a->garray = 0;
3606 
3607   ierr = MatStashCreate_Private(((PetscObject)matin)->comm,matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3608   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3609   ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
3610   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3611   ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
3612 
3613   ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3614   ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
3615   ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3616   ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
3617   ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3618   *newmat = mat;
3619 
3620   PetscFunctionReturn(0);
3621 }
3622 
3623 #undef __FUNCT__
3624 #define __FUNCT__ "MatLoad_MPIBAIJ"
3625 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3626 {
3627   PetscErrorCode ierr;
3628   int            fd;
3629   PetscInt       i,nz,j,rstart,rend;
3630   PetscScalar    *vals,*buf;
3631   MPI_Comm       comm = ((PetscObject)viewer)->comm;
3632   MPI_Status     status;
3633   PetscMPIInt    rank,size,maxnz;
3634   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3635   PetscInt       *locrowlens = PETSC_NULL,*procsnz = PETSC_NULL,*browners = PETSC_NULL;
3636   PetscInt       jj,*mycols,*ibuf,bs=1,Mbs,mbs,extra_rows,mmax;
3637   PetscMPIInt    tag = ((PetscObject)viewer)->tag;
3638   PetscInt       *dlens = PETSC_NULL,*odlens = PETSC_NULL,*mask = PETSC_NULL,*masked1 = PETSC_NULL,*masked2 = PETSC_NULL,rowcount,odcount;
3639   PetscInt       dcount,kmax,k,nzcount,tmp,mend,sizesset=1,grows,gcols;
3640 
3641   PetscFunctionBegin;
3642   ierr = PetscOptionsBegin(comm,PETSC_NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3643     ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr);
3644   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3645 
3646   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3647   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3648   if (!rank) {
3649     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3650     ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
3651     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Not a matrix object in file");
3652   }
3653 
3654   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) sizesset = 0;
3655 
3656   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3657   M = header[1]; N = header[2];
3658 
3659   /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
3660   if (sizesset && newmat->rmap->N < 0) newmat->rmap->N = M;
3661   if (sizesset && newmat->cmap->N < 0) newmat->cmap->N = N;
3662 
3663   /* If global sizes are set, check if they are consistent with that given in the file */
3664   if (sizesset) {
3665     ierr = MatGetSize(newmat,&grows,&gcols);CHKERRQ(ierr);
3666   }
3667   if (sizesset && newmat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows: Matrix in file has (%D) and input matrix has (%D)",M,grows);
3668   if (sizesset && newmat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols: Matrix in file has (%D) and input matrix has (%D)",N,gcols);
3669 
3670   if (M != N) SETERRQ(((PetscObject)viewer)->comm,PETSC_ERR_SUP,"Can only do square matrices");
3671 
3672   /*
3673      This code adds extra rows to make sure the number of rows is
3674      divisible by the blocksize
3675   */
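  /* e.g. (illustrative numbers) with M = 10 and bs = 4: Mbs = 10/4 = 2 and extra_rows = 4 - 10 + 4*2 = 2,
     so Mbs is bumped to 3 and two padding rows (given a unit diagonal below) are appended */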
3676   Mbs        = M/bs;
3677   extra_rows = bs - M + bs*Mbs;
3678   if (extra_rows == bs) extra_rows = 0;
3679   else                  Mbs++;
3680   if (extra_rows && !rank) {
3681     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3682   }
3683 
3684   /* determine ownership of all rows */
3685   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3686     mbs        = Mbs/size + ((Mbs % size) > rank);
3687     m          = mbs*bs;
3688   } else { /* User set */
3689     m          = newmat->rmap->n;
3690     mbs        = m/bs;
3691   }
3692   ierr       = PetscMalloc2(size+1,PetscInt,&rowners,size+1,PetscInt,&browners);CHKERRQ(ierr);
3693   ierr       = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3694 
3695   /* process 0 needs enough room for process with most rows */
3696   if (!rank) {
3697     mmax = rowners[1];
3698     for (i=2; i<=size; i++) {
3699       mmax = PetscMax(mmax,rowners[i]);
3700     }
3701     mmax*=bs;
3702   } else mmax = m;
3703 
3704   rowners[0] = 0;
3705   for (i=2; i<=size; i++)  rowners[i] += rowners[i-1];
3706   for (i=0; i<=size;  i++) browners[i] = rowners[i]*bs;
3707   rstart = rowners[rank];
3708   rend   = rowners[rank+1];
3709 
3710   /* distribute row lengths to all processors */
3711   ierr = PetscMalloc((mmax+1)*sizeof(PetscInt),&locrowlens);CHKERRQ(ierr);
3712   if (!rank) {
3713     mend = m;
3714     if (size == 1) mend = mend - extra_rows;
3715     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3716     for (j=mend; j<m; j++) locrowlens[j] = 1;
3717     ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
3718     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
3719     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
3720     for (j=0; j<m; j++) {
3721       procsnz[0] += locrowlens[j];
3722     }
3723     for (i=1; i<size; i++) {
3724       mend = browners[i+1] - browners[i];
3725       if (i == size-1) mend = mend - extra_rows;
3726       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3727       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3728       /* calculate the number of nonzeros on each processor */
3729       for (j=0; j<browners[i+1]-browners[i]; j++) {
3730         procsnz[i] += rowlengths[j];
3731       }
3732       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3733     }
3734     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3735   } else {
3736     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3737   }
3738 
3739   if (!rank) {
3740     /* determine max buffer needed and allocate it */
3741     maxnz = procsnz[0];
3742     for (i=1; i<size; i++) {
3743       maxnz = PetscMax(maxnz,procsnz[i]);
3744     }
3745     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
3746 
3747     /* read in my part of the matrix column indices  */
3748     nz     = procsnz[0];
3749     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3750     mycols = ibuf;
3751     if (size == 1)  nz -= extra_rows;
3752     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3753     if (size == 1)  for (i=0; i< extra_rows; i++) { mycols[nz+i] = M+i; }
3754 
3755     /* read in the column indices for every other process (except the last) and ship them off */
3756     for (i=1; i<size-1; i++) {
3757       nz   = procsnz[i];
3758       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3759       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3760     }
3761     /* read in the stuff for the last proc */
3762     if (size != 1) {
3763       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3764       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3765       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3766       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3767     }
3768     ierr = PetscFree(cols);CHKERRQ(ierr);
3769   } else {
3770     /* determine buffer space needed for message */
3771     nz = 0;
3772     for (i=0; i<m; i++) {
3773       nz += locrowlens[i];
3774     }
3775     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3776     mycols = ibuf;
3777     /* receive message of column indices */
3778     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3779     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3780     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3781   }
3782 
3783   /* loop over local rows, determining number of off diagonal entries */
3784   ierr     = PetscMalloc2(rend-rstart,PetscInt,&dlens,rend-rstart,PetscInt,&odlens);CHKERRQ(ierr);
3785   ierr     = PetscMalloc3(Mbs,PetscInt,&mask,Mbs,PetscInt,&masked1,Mbs,PetscInt,&masked2);CHKERRQ(ierr);
3786   ierr     = PetscMemzero(mask,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3787   ierr     = PetscMemzero(masked1,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3788   ierr     = PetscMemzero(masked2,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3789   rowcount = 0; nzcount = 0;
3790   for (i=0; i<mbs; i++) {
3791     dcount  = 0;
3792     odcount = 0;
3793     for (j=0; j<bs; j++) {
3794       kmax = locrowlens[rowcount];
3795       for (k=0; k<kmax; k++) {
3796         tmp = mycols[nzcount++]/bs;
3797         if (!mask[tmp]) {
3798           mask[tmp] = 1;
3799           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3800           else masked1[dcount++] = tmp;
3801         }
3802       }
3803       rowcount++;
3804     }
3805 
3806     dlens[i]  = dcount;
3807     odlens[i] = odcount;
3808 
3809     /* zero out the mask elements we set */
3810     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3811     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3812   }
3813 
3814 
3815   if (!sizesset) {
3816     ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3817   }
3818   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3819 
3820   if (!rank) {
3821     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3822     /* read in my part of the matrix numerical values  */
3823     nz = procsnz[0];
3824     vals = buf;
3825     mycols = ibuf;
3826     if (size == 1)  nz -= extra_rows;
3827     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3828     if (size == 1)  for (i=0; i< extra_rows; i++) { vals[nz+i] = 1.0; }
3829 
3830     /* insert into matrix */
3831     jj      = rstart*bs;
3832     for (i=0; i<m; i++) {
3833       ierr = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3834       mycols += locrowlens[i];
3835       vals   += locrowlens[i];
3836       jj++;
3837     }
3838     /* read in the values for the other processes (except the last one) and ship them out */
3839     for (i=1; i<size-1; i++) {
3840       nz   = procsnz[i];
3841       vals = buf;
3842       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3843       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3844     }
3845     /* the last proc */
3846     if (size != 1){
3847       nz   = procsnz[i] - extra_rows;
3848       vals = buf;
3849       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3850       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3851       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3852     }
3853     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3854   } else {
3855     /* receive numeric values */
3856     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3857 
3858     /* receive message of values */
3859     vals   = buf;
3860     mycols = ibuf;
3861     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3862 
3863     /* insert into matrix */
3864     jj      = rstart*bs;
3865     for (i=0; i<m; i++) {
3866       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3867       mycols += locrowlens[i];
3868       vals   += locrowlens[i];
3869       jj++;
3870     }
3871   }
3872   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3873   ierr = PetscFree(buf);CHKERRQ(ierr);
3874   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3875   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3876   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3877   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3878   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3879   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3880 
3881   PetscFunctionReturn(0);
3882 }
3883 
3884 #undef __FUNCT__
3885 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3886 /*@
3887    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3888 
3889    Input Parameters:
3890 +  mat  - the matrix
3891 -  fact - factor
3892 
3893    Not Collective, each process can use a different factor
3894 
3895    Level: advanced
3896 
3897   Notes:
3898    This can also be set by the command line option: -mat_use_hash_table <fact>
3899 
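   Example usage (a sketch; the factor value is illustrative):

.vb
   MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);
   MatMPIBAIJSetHashTableFactor(mat,1.6);
.ve
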
3900 .keywords: matrix, hashtable, factor, HT
3901 
3902 .seealso: MatSetOption()
3903 @*/
3904 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3905 {
3906   PetscErrorCode ierr;
3907 
3908   PetscFunctionBegin;
3909   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3910   PetscFunctionReturn(0);
3911 }
3912 
3913 EXTERN_C_BEGIN
3914 #undef __FUNCT__
3915 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3916 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3917 {
3918   Mat_MPIBAIJ *baij;
3919 
3920   PetscFunctionBegin;
3921   baij = (Mat_MPIBAIJ*)mat->data;
3922   baij->ht_fact = fact;
3923   PetscFunctionReturn(0);
3924 }
3925 EXTERN_C_END
3926 
3927 #undef __FUNCT__
3928 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3929 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3930 {
3931   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
3932   PetscFunctionBegin;
3933   *Ad     = a->A;
3934   *Ao     = a->B;
3935   *colmap = a->garray;
3936   PetscFunctionReturn(0);
3937 }
3938 
3939 /*
3940     Special version for direct calls from Fortran (to eliminate two function call overheads)
3941 */
3942 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3943 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3944 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3945 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3946 #endif
3947 
3948 #undef __FUNCT__
3949 #define __FUNCT__ "matmpibaijsetvaluesblocked"
3950 /*@C
3951   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3952 
3953   Collective on Mat
3954 
3955   Input Parameters:
3956 + mat - the matrix
3957 . min - number of input rows
3958 . im - input rows
3959 . nin - number of input columns
3960 . in - input columns
3961 . v - numerical values input
3962 - addvin - INSERT_VALUES or ADD_VALUES
3963 
3964   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3965 
3966   Level: advanced
3967 
3968 .seealso:   MatSetValuesBlocked()
3969 @*/
3970 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3971 {
3972   /* convert input arguments to C version */
3973   Mat             mat = *matin;
3974   PetscInt        m = *min, n = *nin;
3975   InsertMode      addv = *addvin;
3976 
3977   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3978   const MatScalar *value;
3979   MatScalar       *barray=baij->barray;
3980   PetscBool       roworiented = baij->roworiented;
3981   PetscErrorCode  ierr;
3982   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3983   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3984   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3985 
3986   PetscFunctionBegin;
3987   /* tasks normally handled by MatSetValuesBlocked() */
3988   if (mat->insertmode == NOT_SET_VALUES) {
3989     mat->insertmode = addv;
3990   }
3991 #if defined(PETSC_USE_DEBUG)
3992   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3993   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3994 #endif
3995   if (mat->assembled) {
3996     mat->was_assembled = PETSC_TRUE;
3997     mat->assembled     = PETSC_FALSE;
3998   }
3999   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
4000 
4001 
4002   if (!barray) {
4003     ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
4004     baij->barray = barray;
4005   }
4006 
4007   if (roworiented) {
4008     stepval = (n-1)*bs;
4009   } else {
4010     stepval = (m-1)*bs;
4011   }
4012   for (i=0; i<m; i++) {
4013     if (im[i] < 0) continue;
4014 #if defined(PETSC_USE_DEBUG)
4015     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
4016 #endif
4017     if (im[i] >= rstart && im[i] < rend) {
4018       row = im[i] - rstart;
4019       for (j=0; j<n; j++) {
4020         /* if there is only one block column (row oriented) or one block row (column oriented), the block is already contiguous in v and no copy is required */
4021         if ((roworiented) && (n == 1)) {
4022           barray = (MatScalar*)v + i*bs2;
4023         } else if ((!roworiented) && (m == 1)) {
4024           barray = (MatScalar*)v + j*bs2;
4025         } else { /* Here a copy is required */
4026           if (roworiented) {
4027             value = v + i*(stepval+bs)*bs + j*bs;
4028           } else {
4029             value = v + j*(stepval+bs)*bs + i*bs;
4030           }
4031           for (ii=0; ii<bs; ii++,value+=stepval) {
4032             for (jj=0; jj<bs; jj++) {
4033               *barray++  = *value++;
4034             }
4035           }
4036           barray -=bs2;
4037         }
4038 
4039         if (in[j] >= cstart && in[j] < cend){
4040           col  = in[j] - cstart;
4041           ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
4042         }
4043         else if (in[j] < 0) continue;
4044 #if defined(PETSC_USE_DEBUG)
4045         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
4046 #endif
4047         else {
4048           if (mat->was_assembled) {
4049             if (!baij->colmap) {
4050               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
4051             }
4052 
4053 #if defined(PETSC_USE_DEBUG)
4054 #if defined (PETSC_USE_CTABLE)
4055             { PetscInt data;
4056               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
4057               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
4058             }
4059 #else
4060             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
4061 #endif
4062 #endif
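            /* colmap stores, for each global block column, 1 + the first local point column of that
               block in the off-diagonal part B (0 if the block is not present yet); recover the local
               block column index from it */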
4063 #if defined (PETSC_USE_CTABLE)
4064             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
4065             col  = (col - 1)/bs;
4066 #else
4067             col = (baij->colmap[in[j]] - 1)/bs;
4068 #endif
4069             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
4070               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
4071               col =  in[j];
4072             }
4073           }
4074           else col = in[j];
4075           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
4076         }
4077       }
4078     } else {
4079       if (!baij->donotstash) {
4080         if (roworiented) {
4081           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
4082         } else {
4083           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
4084         }
4085       }
4086     }
4087   }
4088 
4089   /* task normally handled by MatSetValuesBlocked() */
4090   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
4091   PetscFunctionReturn(0);
4092 }
4093 
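/*
   Editor's sketch (not in the original source): the standard C-side equivalent of the direct
   Fortran entry point above is MatSetValuesBlocked().  Row and column indices are block indices,
   and v supplies m*n blocks of bs*bs values each (row oriented by default).  For a matrix with
   block size 2, inserting one block at block position (0,2) might look like:

     PetscInt    row = 0, col = 2;
     PetscScalar vals[4] = {1.0,2.0,3.0,4.0};
     ierr = MatSetValuesBlocked(A,1,&row,1,&col,vals,ADD_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/
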
4094 #undef __FUNCT__
4095 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
4096 /*@
4097      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local rows
4098          in standard CSR format.
4099 
4100    Collective on MPI_Comm
4101 
4102    Input Parameters:
4103 +  comm - MPI communicator
4104 .  bs - the block size, only a block size of 1 is supported
4105 .  m - number of local rows (Cannot be PETSC_DECIDE)
4106 .  n - number of local columns. This value should be the same as the local size used in creating the
4107        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
4108        calculated if N is given). For square matrices n is almost always m.
4109 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
4110 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
4111 .   i - row indices
4112 .   j - column indices
4113 -   a - matrix values
4114 
4115    Output Parameter:
4116 .   mat - the matrix
4117 
4118    Level: intermediate
4119 
4120    Notes:
4121        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
4122      thus you CANNOT change the matrix entries by changing the values of a[] after you have
4123      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
4124 
4125        The i and j indices are 0 based; the i array holds, for each local row, the offset of that row's entries in the local j (and a) arrays. A small usage sketch follows this routine.
4126 
4127 .keywords: matrix, baij, compressed row, sparse, parallel
4128 
4129 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
4130           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
4131 @*/
4132 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4133 {
4134   PetscErrorCode ierr;
4135 
4136 
4137   PetscFunctionBegin;
4138   if (i[0]) {
4139     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4140   }
4141   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
4142   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4143   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4144   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
4145   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4146   PetscFunctionReturn(0);
4147 }
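
/*
   Editor's sketch (not in the original source): building a small MPIBAIJ matrix from local CSR
   arrays with bs = 1.  Each process passes the CSR description of the rows it owns; the row
   offsets i[] are local and zero based, and the arrays are copied so they may be freed afterwards.

     On a process that owns 2 rows of a matrix with 4 global columns:
       Mat         A;
       PetscInt    i[] = {0,2,3};
       PetscInt    j[] = {0,3,1};
       PetscScalar a[] = {1.0,2.0,3.0};
       ierr = MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,2,PETSC_DECIDE,PETSC_DETERMINE,4,i,j,a,&A);CHKERRQ(ierr);
*/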
4148