xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision 94ef8dde638caef1d0cd84a7dc8a2db65fcda8b6)
1 
2 /*
3    Routines to compute overlapping regions of a parallel MPI matrix
4   and to find submatrices that were shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat,PetscInt,char**,PetscInt*,PetscInt**);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat,PetscInt,PetscInt**,PetscInt**,PetscInt*);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
13 
14 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C,PetscInt imax,IS is[],PetscInt ov)
15 {
16   PetscErrorCode ierr;
17   PetscInt       i,N=C->cmap->N, bs=C->rmap->bs;
18   IS             *is_new;
19 
20   PetscFunctionBegin;
21   ierr = PetscMalloc1(imax,&is_new);CHKERRQ(ierr);
22   /* Convert the indices into block format */
23   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,imax,is,is_new);CHKERRQ(ierr);
24   if (ov < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap specified\n");
25   for (i=0; i<ov; ++i) {
26     ierr = MatIncreaseOverlap_MPIBAIJ_Once(C,imax,is_new);CHKERRQ(ierr);
27   }
28   for (i=0; i<imax; i++) {ierr = ISDestroy(&is[i]);CHKERRQ(ierr);}
29   ierr = ISExpandIndicesGeneral(N,N,bs,imax,is_new,is);CHKERRQ(ierr);
30   for (i=0; i<imax; i++) {ierr = ISDestroy(&is_new[i]);CHKERRQ(ierr);}
31   ierr = PetscFree(is_new);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
34 
35 /*
36   Sample message format:
37   If a processor A wants processor B to process some elements corresponding
38   to index sets is[1], is[5]
39   mesg [0] = 2   (no of index sets in the mesg)
40   -----------
41   mesg [1] = 1 => is[1]
42   mesg [2] = sizeof(is[1]);
43   -----------
44   mesg [5] = 5  => is[5]
45   mesg [6] = sizeof(is[5]);
46   -----------
47   mesg [7]
48   mesg [n]  data(is[1])
49   -----------
50   mesg[n+1]
51   mesg[m]  data(is[5])
52   -----------
53 
54   Notes:
55   nrqs - no of requests sent (or to be sent out)
  nrqr - no of requests received (which have to be, or which have been, processed)
57 */
/*
   MatIncreaseOverlap_MPIBAIJ_Once - One round of overlap growth on block
   index sets.  Rows owned elsewhere are shipped to their owners (phase 1),
   each owner expands them via MatIncreaseOverlap_MPIBAIJ_Receive, and the
   results are shipped back and merged into the local tables (phase 2).
   The input IS objects in is[] are destroyed and replaced by new ones.
*/
PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C,PetscInt imax,IS is[])
{
  Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
  const PetscInt **idx,*idx_i;
  PetscInt       *n,*w3,*w4,**data,len;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,*w2,*w1,nrqr;
  PetscInt       Mbs,i,j,k,**rbuf,row,proc=-1,nrqs,msz,**outdat,**ptr;
  PetscInt       *ctr,*pa,*tmp,*isz,*isz1,**xdata,**rbuf2,*d_p;
  PetscMPIInt    *onodes1,*olengths1,*onodes2,*olengths2;
  PetscBT        *table;
  MPI_Comm       comm;
  MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2;
  MPI_Status     *s_status,*recv_status;
  char           *t_p;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
  size = c->size;
  rank = c->rank;
  Mbs  = c->Mbs;

  /* tag1: requests shipped to owners; tag2: expanded results shipped back */
  ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);

  ierr = PetscMalloc2(imax+1,&idx,imax,&n);CHKERRQ(ierr);

  for (i=0; i<imax; i++) {
    ierr = ISGetIndices(is[i],&idx[i]);CHKERRQ(ierr);
    ierr = ISGetLocalSize(is[i],&n[i]);CHKERRQ(ierr);
  }

  /* evaluate communication - mesg to who,length of mesg, and buffer space
     required. Based on this, buffers are allocated, and data copied into them.
     w1[p] = total index entries destined for proc p, w3[p] = number of index
     sets that contribute to proc p, w4 = per-IS scratch counts */
  ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
  for (i=0; i<imax; i++) {
    ierr  = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
    idx_i = idx[i];
    len   = n[i];
    for (j=0; j<len; j++) {
      row = idx_i[j];
      if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index set cannot have negative entries");
      /* row is a block index; scale by bs to locate its owner in the point layout */
      ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
      w4[proc]++;
    }
    for (j=0; j<size; j++) {
      if (w4[j]) { w1[j] += w4[j]; w3[j]++;}
    }
  }

  nrqs     = 0;              /* no of outgoing messages */
  msz      = 0;              /* total mesg length (for all proc */
  w1[rank] = 0;              /* no mesg sent to itself */
  w3[rank] = 0;
  for (i=0; i<size; i++) {
    if (w1[i])  {w2[i] = 1; nrqs++;} /* there exists a message to proc i */
  }
  /* pa - is list of processors to communicate with */
  ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr);
  for (i=0,j=0; i<size; i++) {
    if (w1[i]) {pa[j] = i; j++;}
  }

  /* Each message would have a header = 1 + 2*(no of IS) + data */
  for (i=0; i<nrqs; i++) {
    j      = pa[i];
    w1[j] += w2[j] + 2*w3[j];
    msz   += w1[j];
  }

  /* Determine the number of messages to expect, their lengths, from from-ids */
  ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);

  /* Now post the Irecvs corresponding to these messages */
  ierr = PetscPostIrecvInt(comm,tag1,nrqr,onodes1,olengths1,&rbuf,&r_waits1);CHKERRQ(ierr);

  /* Allocate Memory for outgoing messages: outdat[p] points into the single
     buffer tmp; ptr[p] is the running write cursor for proc p's data section */
  ierr = PetscMalloc4(size,&outdat,size,&ptr,msz,&tmp,size,&ctr);CHKERRQ(ierr);
  ierr = PetscMemzero(outdat,size*sizeof(PetscInt*));CHKERRQ(ierr);
  ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
  {
    /* carve tmp into per-destination slices of length w1[j] */
    PetscInt *iptr = tmp,ict  = 0;
    for (i=0; i<nrqs; i++) {
      j         = pa[i];
      iptr     +=  ict;
      outdat[j] = iptr;
      ict       = w1[j];
    }
  }

  /* Form the outgoing messages */
  /*plug in the headers*/
  for (i=0; i<nrqs; i++) {
    j            = pa[i];
    outdat[j][0] = 0;
    ierr         = PetscMemzero(outdat[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
    ptr[j]       = outdat[j] + 2*w3[j] + 1;
  }

  /* Memory for doing local proc's work: one bit table (Mbs bits) and one
     result array (Mbs entries) per index set, carved out of two big blocks */
  {
    ierr = PetscCalloc5(imax,&table, imax,&data, imax,&isz, Mbs*imax,&d_p, (Mbs/PETSC_BITS_PER_BYTE+1)*imax,&t_p);CHKERRQ(ierr);

    for (i=0; i<imax; i++) {
      table[i] = t_p + (Mbs/PETSC_BITS_PER_BYTE+1)*i;
      data[i]  = d_p + (Mbs)*i;
    }
  }

  /* Parse the IS and update local tables and the outgoing buf with the data*/
  {
    PetscInt n_i,*data_i,isz_i,*outdat_j,ctr_j;
    PetscBT  table_i;

    for (i=0; i<imax; i++) {
      ierr    = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
      n_i     = n[i];
      table_i = table[i];
      idx_i   = idx[i];
      data_i  = data[i];
      isz_i   = isz[i];
      for (j=0; j<n_i; j++) {   /* parse the indices of each IS */
        row  = idx_i[j];
        ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
        if (proc != rank) { /* copy to the outgoing buffer */
          ctr[proc]++;
          *ptr[proc] = row;
          ptr[proc]++;
        } else { /* Update the local table */
          if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
        }
      }
      /* Update the headers for the current IS: record (IS number, count) pair */
      for (j=0; j<size; j++) { /* Can Optimise this loop by using pa[] */
        if ((ctr_j = ctr[j])) {
          outdat_j        = outdat[j];
          k               = ++outdat_j[0];
          outdat_j[2*k]   = ctr_j;
          outdat_j[2*k-1] = i;
        }
      }
      isz[i] = isz_i;
    }
  }

  /*  Now  post the sends */
  ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
  for (i=0; i<nrqs; ++i) {
    j    = pa[i];
    ierr = MPI_Isend(outdat[j],w1[j],MPIU_INT,j,tag1,comm,s_waits1+i);CHKERRQ(ierr);
  }

  /* No longer need the original indices*/
  for (i=0; i<imax; ++i) {
    ierr = ISRestoreIndices(is[i],idx+i);CHKERRQ(ierr);
  }
  ierr = PetscFree2(idx,n);CHKERRQ(ierr);

  /* The input sets are consumed here; new ones are created at the end */
  for (i=0; i<imax; ++i) {
    ierr = ISDestroy(&is[i]);CHKERRQ(ierr);
  }

  /* Do Local work: overlap rows this process already owns while messages are in flight */
  ierr = MatIncreaseOverlap_MPIBAIJ_Local(C,imax,table,isz,data);CHKERRQ(ierr);

  /* Receive messages*/
  ierr = PetscMalloc1(nrqr+1,&recv_status);CHKERRQ(ierr);
  if (nrqr) {ierr = MPI_Waitall(nrqr,r_waits1,recv_status);CHKERRQ(ierr);}

  ierr = PetscMalloc1(nrqs+1,&s_status);CHKERRQ(ierr);
  if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status);CHKERRQ(ierr);}

  /* Phase 1 sends are complete - deallocate buffers */
  ierr = PetscFree4(outdat,ptr,tmp,ctr);CHKERRQ(ierr);
  ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);

  /* Expand the received requests; xdata[i]/isz1[i] hold the reply to message i */
  ierr = PetscMalloc1(nrqr+1,&xdata);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrqr+1,&isz1);CHKERRQ(ierr);
  ierr = MatIncreaseOverlap_MPIBAIJ_Receive(C,nrqr,rbuf,xdata,isz1);CHKERRQ(ierr);
  ierr = PetscFree(rbuf[0]);CHKERRQ(ierr);
  ierr = PetscFree(rbuf);CHKERRQ(ierr);

  /* Send the data back*/
  /* Do a global reduction to know the buffer space req for incoming messages*/
  {
    PetscMPIInt *rw1;

    ierr = PetscCalloc1(size,&rw1);CHKERRQ(ierr);

    for (i=0; i<nrqr; ++i) {
      proc = recv_status[i].MPI_SOURCE;
      if (proc != onodes1[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPI_SOURCE mismatch");
      rw1[proc] = isz1[i];
    }

    ierr = PetscFree(onodes1);CHKERRQ(ierr);
    ierr = PetscFree(olengths1);CHKERRQ(ierr);

    /* Determine the number of messages to expect, their lengths, from from-ids */
    ierr = PetscGatherMessageLengths(comm,nrqr,nrqs,rw1,&onodes2,&olengths2);CHKERRQ(ierr);
    ierr = PetscFree(rw1);CHKERRQ(ierr);
  }
  /* Now post the Irecvs corresponding to these messages */
  ierr = PetscPostIrecvInt(comm,tag2,nrqs,onodes2,olengths2,&rbuf2,&r_waits2);CHKERRQ(ierr);

  /*  Now  post the sends */
  ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
  for (i=0; i<nrqr; ++i) {
    j    = recv_status[i].MPI_SOURCE;
    ierr = MPI_Isend(xdata[i],isz1[i],MPIU_INT,j,tag2,comm,s_waits2+i);CHKERRQ(ierr);
  }

  /* receive work done on other processors: merge each reply (same header
     layout as the requests) into the local per-IS tables */
  {
    PetscMPIInt idex;
    PetscInt    is_no,ct1,max,*rbuf2_i,isz_i,*data_i,jmax;
    PetscBT     table_i;
    MPI_Status  *status2;

    ierr = PetscMalloc1(PetscMax(nrqr,nrqs)+1,&status2);CHKERRQ(ierr);
    for (i=0; i<nrqs; ++i) {
      ierr = MPI_Waitany(nrqs,r_waits2,&idex,status2+i);CHKERRQ(ierr);
      /* Process the message*/
      rbuf2_i = rbuf2[idex];
      ct1     = 2*rbuf2_i[0]+1;  /* data section starts after the header */
      jmax    = rbuf2[idex][0];
      for (j=1; j<=jmax; j++) {
        max     = rbuf2_i[2*j];
        is_no   = rbuf2_i[2*j-1];
        isz_i   = isz[is_no];
        data_i  = data[is_no];
        table_i = table[is_no];
        for (k=0; k<max; k++,ct1++) {
          row = rbuf2_i[ct1];
          if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
        }
        isz[is_no] = isz_i;
      }
    }
    if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,status2);CHKERRQ(ierr);}
    ierr = PetscFree(status2);CHKERRQ(ierr);
  }

  /* Build the output index sets from the accumulated tables */
  for (i=0; i<imax; ++i) {
    ierr = ISCreateGeneral(PETSC_COMM_SELF,isz[i],data[i],PETSC_COPY_VALUES,is+i);CHKERRQ(ierr);
  }


  ierr = PetscFree(onodes2);CHKERRQ(ierr);
  ierr = PetscFree(olengths2);CHKERRQ(ierr);

  ierr = PetscFree(pa);CHKERRQ(ierr);
  ierr = PetscFree(rbuf2[0]);CHKERRQ(ierr);
  ierr = PetscFree(rbuf2);CHKERRQ(ierr);
  ierr = PetscFree(s_waits1);CHKERRQ(ierr);
  ierr = PetscFree(r_waits1);CHKERRQ(ierr);
  ierr = PetscFree(s_waits2);CHKERRQ(ierr);
  ierr = PetscFree(r_waits2);CHKERRQ(ierr);
  ierr = PetscFree5(table,data,isz,d_p,t_p);CHKERRQ(ierr);
  ierr = PetscFree(s_status);CHKERRQ(ierr);
  ierr = PetscFree(recv_status);CHKERRQ(ierr);
  ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
  ierr = PetscFree(xdata);CHKERRQ(ierr);
  ierr = PetscFree(isz1);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
325 
326 /*
   MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap, to do
328        the work on the local processor.
329 
330      Inputs:
331       C      - MAT_MPIBAIJ;
332       imax - total no of index sets processed at a time;
333       table  - an array of char - size = Mbs bits.
334 
335      Output:
336       isz    - array containing the count of the solution elements corresponding
337                to each index set;
338       data   - pointer to the solutions
339 */
340 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C,PetscInt imax,PetscBT *table,PetscInt *isz,PetscInt **data)
341 {
342   Mat_MPIBAIJ *c = (Mat_MPIBAIJ*)C->data;
343   Mat         A  = c->A,B = c->B;
344   Mat_SeqBAIJ *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
345   PetscInt    start,end,val,max,rstart,cstart,*ai,*aj;
346   PetscInt    *bi,*bj,*garray,i,j,k,row,*data_i,isz_i;
347   PetscBT     table_i;
348 
349   PetscFunctionBegin;
350   rstart = c->rstartbs;
351   cstart = c->cstartbs;
352   ai     = a->i;
353   aj     = a->j;
354   bi     = b->i;
355   bj     = b->j;
356   garray = c->garray;
357 
358 
359   for (i=0; i<imax; i++) {
360     data_i  = data[i];
361     table_i = table[i];
362     isz_i   = isz[i];
363     for (j=0,max=isz[i]; j<max; j++) {
364       row   = data_i[j] - rstart;
365       start = ai[row];
366       end   = ai[row+1];
367       for (k=start; k<end; k++) { /* Amat */
368         val = aj[k] + cstart;
369         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
370       }
371       start = bi[row];
372       end   = bi[row+1];
373       for (k=start; k<end; k++) { /* Bmat */
374         val = garray[bj[k]];
375         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
376       }
377     }
378     isz[i] = isz_i;
379   }
380   PetscFunctionReturn(0);
381 }
382 /*
      MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
384          and return the output
385 
386          Input:
387            C    - the matrix
388            nrqr - no of messages being processed.
           rbuf - an array of pointers to the received requests
390 
391          Output:
392            xdata - array of messages to be sent back
393            isz1  - size of each message
394 
395   For better efficiency perhaps we should malloc separately each xdata[i],
396 then if a remalloc is required we need only copy the data for that one row
rather than all previous rows as it is now where a single large chunk of
398 memory is used.
399 
400 */
static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C,PetscInt nrqr,PetscInt **rbuf,PetscInt **xdata,PetscInt * isz1)
{
  Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
  Mat            A  = c->A,B = c->B;
  Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
  PetscErrorCode ierr;
  PetscInt       rstart,cstart,*ai,*aj,*bi,*bj,*garray,i,j,k;
  PetscInt       row,total_sz,ct,ct1,ct2,ct3,mem_estimate,oct2,l,start,end;
  PetscInt       val,max1,max2,Mbs,no_malloc =0,*tmp,new_estimate,ctr;
  PetscInt       *rbuf_i,kmax,rbuf_0;
  PetscBT        xtable;

  PetscFunctionBegin;
  Mbs    = c->Mbs;
  rstart = c->rstartbs;
  cstart = c->cstartbs;
  ai     = a->i;
  aj     = a->j;
  bi     = b->i;
  bj     = b->j;
  garray = c->garray;


  /* ct = total number of (IS,count) header pairs; total_sz = total number of
     requested row indices over all incoming messages */
  for (i=0,ct=0,total_sz=0; i<nrqr; ++i) {
    rbuf_i =  rbuf[i];
    rbuf_0 =  rbuf_i[0];
    ct    += rbuf_0;
    for (j=1; j<=rbuf_0; j++) total_sz += rbuf_i[2*j];
  }

  /* Guess the reply size from the average number of nonzeros per block row;
     all replies share one buffer (xdata[0]) that grows on demand below */
  if (c->Mbs) max1 = ct*(a->nz +b->nz)/c->Mbs;
  else        max1 = 1;
  mem_estimate = 3*((total_sz > max1 ? total_sz : max1)+1);
  ierr         = PetscMalloc1(mem_estimate,&xdata[0]);CHKERRQ(ierr);
  ++no_malloc;
  ierr = PetscBTCreate(Mbs,&xtable);CHKERRQ(ierr);
  ierr = PetscMemzero(isz1,nrqr*sizeof(PetscInt));CHKERRQ(ierr);

  /* Cursors: ct1 reads request data within a message, ct2 writes reply data
     within a message, ct3 counts total PetscInts written across ALL replies
     (compared against mem_estimate to trigger reallocation) */
  ct3 = 0;
  for (i=0; i<nrqr; i++) { /* for each mesg from proc i */
    rbuf_i =  rbuf[i];
    rbuf_0 =  rbuf_i[0];
    ct1    =  2*rbuf_0+1;
    ct2    =  ct1;
    ct3   += ct1;
    for (j=1; j<=rbuf_0; j++) { /* for each IS from proc i*/
      /* xtable is reset per IS so uniqueness is enforced within one IS only */
      ierr = PetscBTMemzero(Mbs,xtable);CHKERRQ(ierr);
      oct2 = ct2;  /* remember where this IS's reply data starts */
      kmax = rbuf_i[2*j];
      for (k=0; k<kmax; k++,ct1++) {
        row = rbuf_i[ct1];
        if (!PetscBTLookupSet(xtable,row)) {
          if (!(ct3 < mem_estimate)) {
            /* Grow the shared buffer by 1.5x and rebase xdata[1..i] into it */
            new_estimate = (PetscInt)(1.5*mem_estimate)+1;
            ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
            ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
            ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
            xdata[0]     = tmp;
            mem_estimate = new_estimate; ++no_malloc;
            for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
          }
          xdata[i][ct2++] = row;
          ct3++;
        }
      }
      /* Expand every requested row copied above by its A and B columns */
      for (k=oct2,max2=ct2; k<max2; k++)  {
        row   = xdata[i][k] - rstart;
        start = ai[row];
        end   = ai[row+1];
        for (l=start; l<end; l++) { /* diagonal part */
          val = aj[l] + cstart;
          if (!PetscBTLookupSet(xtable,val)) {
            if (!(ct3 < mem_estimate)) {
              new_estimate = (PetscInt)(1.5*mem_estimate)+1;
              ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
              ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
              ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
              xdata[0]     = tmp;
              mem_estimate = new_estimate; ++no_malloc;
              for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
            }
            xdata[i][ct2++] = val;
            ct3++;
          }
        }
        start = bi[row];
        end   = bi[row+1];
        for (l=start; l<end; l++) { /* off-diagonal part */
          val = garray[bj[l]];
          if (!PetscBTLookupSet(xtable,val)) {
            if (!(ct3 < mem_estimate)) {
              new_estimate = (PetscInt)(1.5*mem_estimate)+1;
              ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
              ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
              ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
              xdata[0]     = tmp;
              mem_estimate = new_estimate; ++no_malloc;
              for (ctr =1; ctr <=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
            }
            xdata[i][ct2++] = val;
            ct3++;
          }
        }
      }
      /* Update the header: mirror the request header layout (IS no, count) */
      xdata[i][2*j]   = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
      xdata[i][2*j-1] = rbuf_i[2*j-1];
    }
    xdata[i][0] = rbuf_0;
    xdata[i+1]  = xdata[i] + ct2;  /* next reply starts right after this one */
    isz1[i]     = ct2; /* size of each message */
  }
  ierr = PetscBTDestroy(&xtable);CHKERRQ(ierr);
  /* NOTE(review): mem_estimate/ct3 count PetscInts, not bytes - message wording looks off */
  ierr = PetscInfo3(C,"Allocated %D bytes, required %D, no of mallocs = %D\n",mem_estimate,ct3,no_malloc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
517 
/*
   MatCreateSubMatrices_MPIBAIJ - Extracts ismax sequential submatrices from
   the parallel matrix C.  Index sets are compressed to block form, the
   extraction is done in stages (bounded by a ~20MB working-set heuristic),
   and the stage count is saved in the first submatrix for MAT_REUSE_MATRIX.
*/
PetscErrorCode MatCreateSubMatrices_MPIBAIJ(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submat[])
{
  IS             *isrow_block,*iscol_block;
  Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
  PetscErrorCode ierr;
  PetscInt       nmax,nstages_local,nstages,i,pos,max_no,N=C->cmap->N,bs=C->rmap->bs;
  Mat_SeqBAIJ    *subc;
  Mat_SubSppt    *smat;

  PetscFunctionBegin;
  /* The compression and expansion should be avoided. Doesn't point
     out errors, might change the indices, hence buggy */
  ierr = PetscMalloc2(ismax+1,&isrow_block,ismax+1,&iscol_block);CHKERRQ(ierr);
  ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,ismax,isrow,isrow_block);CHKERRQ(ierr);
  ierr = ISCompressIndicesGeneral(N,C->cmap->n,bs,ismax,iscol,iscol_block);CHKERRQ(ierr);

  /* Determine the number of stages through which submatrices are done
     (nmax = number of submatrices per stage, from a ~20MB memory heuristic) */
  if (!C->cmap->N) nmax=20*1000000/sizeof(PetscInt);
  else nmax = 20*1000000 / (c->Nbs * sizeof(PetscInt));
  if (!nmax) nmax = 1;

  if (scall == MAT_INITIAL_MATRIX) {
    nstages_local = ismax/nmax + ((ismax % nmax) ? 1 : 0); /* local nstages */

    /* Make sure every processor loops through the nstages */
    ierr = MPIU_Allreduce(&nstages_local,&nstages,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)C));CHKERRQ(ierr);

    /* Allocate memory to hold all the submatrices and dummy submatrices */
    ierr = PetscCalloc1(ismax+nstages,submat);CHKERRQ(ierr);
  } else { /* MAT_REUSE_MATRIX */
    /* Recover the stage count stored by the MAT_INITIAL_MATRIX call */
    if (ismax) {
      subc = (Mat_SeqBAIJ*)((*submat)[0]->data);
      smat   = subc->submatis1;
    } else { /* (*submat)[0] is a dummy matrix */
      smat = (Mat_SubSppt*)(*submat)[0]->data;
    }
    if (!smat) {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"MatCreateSubMatrices(...,MAT_REUSE_MATRIX,...) requires submat");
    }
    nstages = smat->nstages;
  }

  /* Extract at most nmax submatrices per stage; ranks that run out of work
     still participate with max_no == 0 (producing dummy matrices) */
  for (i=0,pos=0; i<nstages; i++) {
    if (pos+nmax <= ismax) max_no = nmax;
    else if (pos == ismax) max_no = 0;
    else                   max_no = ismax-pos;

    ierr = MatCreateSubMatrices_MPIBAIJ_local(C,max_no,isrow_block+pos,iscol_block+pos,scall,*submat+pos);CHKERRQ(ierr);
    if (!max_no && scall == MAT_INITIAL_MATRIX) { /* submat[pos] is a dummy matrix */
      smat = (Mat_SubSppt*)(*submat)[pos]->data;
      smat->nstages = nstages;
    }
    pos += max_no;
  }

  if (scall == MAT_INITIAL_MATRIX && ismax) {
    /* save nstages for reuse */
    subc = (Mat_SeqBAIJ*)((*submat)[0]->data);
    smat = subc->submatis1;
    smat->nstages = nstages;
  }

  for (i=0; i<ismax; i++) {
    ierr = ISDestroy(&isrow_block[i]);CHKERRQ(ierr);
    ierr = ISDestroy(&iscol_block[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree2(isrow_block,iscol_block);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
587 
#if defined(PETSC_USE_CTABLE)
/*
  PetscGetProc - Returns in rank the process owning global block row `row`,
  given ownership offsets proc_gnode[0..size] (process p owns rows in
  [proc_gnode[p], proc_gnode[p+1])).  Starts from an interpolated guess and
  walks up or down until the owning range is found.
*/
PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
{
  PetscInt       nGlobalNd = proc_gnode[size];
  PetscMPIInt    fproc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Initial guess assumes rows are distributed roughly uniformly */
  ierr = PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)),&fproc);CHKERRQ(ierr);
  if (fproc > size) fproc = size;
  /* Correct the guess one process at a time until row falls in range */
  for (;;) {
    if (row < proc_gnode[fproc]) fproc--;
    else if (row >= proc_gnode[fproc+1]) fproc++;
    else break;
  }
  *rank = fproc;
  PetscFunctionReturn(0);
}
#endif
606 
607 /* -------------------------------------------------------------------------*/
608 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
609 PetscErrorCode MatCreateSubMatrices_MPIBAIJ_local(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submats)
610 {
611   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
612   Mat            A  = c->A;
613   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*subc;
614   const PetscInt **icol,**irow;
615   PetscInt       *nrow,*ncol,start;
616   PetscErrorCode ierr;
617   PetscMPIInt    rank,size,tag0,tag2,tag3,tag4,*w1,*w2,*w3,*w4,nrqr;
618   PetscInt       **sbuf1,**sbuf2,*sbuf2_i,i,j,k,l,ct1,ct2,**rbuf1,row,proc=-1;
619   PetscInt       nrqs=0,msz,**ptr=NULL,*req_size=NULL,*ctr=NULL,*pa,*tmp=NULL,tcol;
620   PetscInt       **rbuf3=NULL,*req_source1=NULL,*req_source2,**sbuf_aj,**rbuf2=NULL,max1,max2;
621   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
622 #if defined(PETSC_USE_CTABLE)
623   PetscTable     *cmap,cmap_i=NULL,*rmap,rmap_i;
624 #else
625   PetscInt       **cmap,*cmap_i=NULL,**rmap,*rmap_i;
626 #endif
627   const PetscInt *irow_i,*icol_i;
628   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
629   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3;
630   MPI_Request    *r_waits4,*s_waits3,*s_waits4;
631   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2;
632   MPI_Status     *r_status3,*r_status4,*s_status4;
633   MPI_Comm       comm;
634   PetscScalar    **rbuf4,*rbuf4_i=NULL,**sbuf_aa,*vals,*mat_a=NULL,*imat_a,*sbuf_aa_i;
635   PetscMPIInt    *onodes1,*olengths1,end;
636   PetscInt       **row2proc,*row2proc_i,*imat_ilen,*imat_j,*imat_i;
637   Mat_SubSppt    *smat_i;
638   PetscBool      *issorted,colflag,iscsorted=PETSC_TRUE;
639   PetscInt       *sbuf1_i,*rbuf2_i,*rbuf3_i,ilen;
640   PetscInt       bs=C->rmap->bs,bs2=c->bs2,rstart = c->rstartbs;
641   PetscBool      ijonly=c->ijonly; /* private flag indicates only matrix data structures are requested */
642   PetscInt       nzA,nzB,*a_i=a->i,*b_i=b->i,*a_j = a->j,*b_j = b->j,ctmp,imark,*cworkA,*cworkB;
643   PetscScalar    *vworkA=NULL,*vworkB=NULL,*a_a = a->a,*b_a = b->a;
644   PetscInt       cstart = c->cstartbs,*bmap = c->garray;
645   PetscBool      *allrows,*allcolumns;
646 
647   PetscFunctionBegin;
648   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
649   size = c->size;
650   rank = c->rank;
651 
652   ierr = PetscMalloc5(ismax,&row2proc,ismax,&cmap,ismax,&rmap,ismax+1,&allcolumns,ismax,&allrows);CHKERRQ(ierr);
653   ierr = PetscMalloc5(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol,ismax,&issorted);CHKERRQ(ierr);
654 
655   for (i=0; i<ismax; i++) {
656     ierr = ISSorted(iscol[i],&issorted[i]);CHKERRQ(ierr);
657     if (!issorted[i]) iscsorted = issorted[i]; /* columns are not sorted! */
658     ierr = ISSorted(isrow[i],&issorted[i]);CHKERRQ(ierr);
659 
660     /* Check for special case: allcolumns */
661     ierr = ISIdentity(iscol[i],&colflag);CHKERRQ(ierr);
662     ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
663 
664     if (colflag && ncol[i] == c->Nbs) {
665       allcolumns[i] = PETSC_TRUE;
666       icol[i]       = NULL;
667     } else {
668       allcolumns[i] = PETSC_FALSE;
669       ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
670     }
671 
672     /* Check for special case: allrows */
673     ierr = ISIdentity(isrow[i],&colflag);CHKERRQ(ierr);
674     ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
675     if (colflag && nrow[i] == c->Mbs) {
676       allrows[i] = PETSC_TRUE;
677       irow[i]    = NULL;
678     } else {
679       allrows[i] = PETSC_FALSE;
680       ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
681     }
682   }
683 
684   if (scall == MAT_REUSE_MATRIX) {
685     /* Assumes new rows are same length as the old rows */
686     for (i=0; i<ismax; i++) {
687       subc = (Mat_SeqBAIJ*)(submats[i]->data);
688       if (subc->mbs != nrow[i] || subc->nbs != ncol[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. wrong size");
689 
690       /* Initial matrix as if empty */
691       ierr = PetscMemzero(subc->ilen,subc->mbs*sizeof(PetscInt));CHKERRQ(ierr);
692 
693       /* Initial matrix as if empty */
694       submats[i]->factortype = C->factortype;
695 
696       smat_i   = subc->submatis1;
697 
698       nrqs        = smat_i->nrqs;
699       nrqr        = smat_i->nrqr;
700       rbuf1       = smat_i->rbuf1;
701       rbuf2       = smat_i->rbuf2;
702       rbuf3       = smat_i->rbuf3;
703       req_source2 = smat_i->req_source2;
704 
705       sbuf1     = smat_i->sbuf1;
706       sbuf2     = smat_i->sbuf2;
707       ptr       = smat_i->ptr;
708       tmp       = smat_i->tmp;
709       ctr       = smat_i->ctr;
710 
711       pa          = smat_i->pa;
712       req_size    = smat_i->req_size;
713       req_source1 = smat_i->req_source1;
714 
715       allcolumns[i] = smat_i->allcolumns;
716       allrows[i]    = smat_i->allrows;
717       row2proc[i]   = smat_i->row2proc;
718       rmap[i]       = smat_i->rmap;
719       cmap[i]       = smat_i->cmap;
720     }
721 
722     if (!ismax){ /* Get dummy submatrices and retrieve struct submatis1 */
723       if (!submats[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"submats are null, cannot reuse");
724       smat_i = (Mat_SubSppt*)submats[0]->data;
725 
726       nrqs        = smat_i->nrqs;
727       nrqr        = smat_i->nrqr;
728       rbuf1       = smat_i->rbuf1;
729       rbuf2       = smat_i->rbuf2;
730       rbuf3       = smat_i->rbuf3;
731       req_source2 = smat_i->req_source2;
732 
733       sbuf1       = smat_i->sbuf1;
734       sbuf2       = smat_i->sbuf2;
735       ptr         = smat_i->ptr;
736       tmp         = smat_i->tmp;
737       ctr         = smat_i->ctr;
738 
739       pa          = smat_i->pa;
740       req_size    = smat_i->req_size;
741       req_source1 = smat_i->req_source1;
742 
743       allcolumns[0] = PETSC_FALSE;
744     }
745   } else { /* scall == MAT_INITIAL_MATRIX */
746     /* Get some new tags to keep the communication clean */
747     ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
748     ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
749 
750     /* evaluate communication - mesg to who, length of mesg, and buffer space
751      required. Based on this, buffers are allocated, and data copied into them*/
752     ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);   /* mesg size, initialize work vectors */
753 
754     for (i=0; i<ismax; i++) {
755       jmax   = nrow[i];
756       irow_i = irow[i];
757 
758       ierr   = PetscMalloc1(jmax,&row2proc_i);CHKERRQ(ierr);
759       row2proc[i] = row2proc_i;
760 
761       if (issorted[i]) proc = 0;
762       for (j=0; j<jmax; j++) {
763         if (!issorted[i]) proc = 0;
764         if (allrows[i]) row = j;
765         else row = irow_i[j];
766 
767         while (row >= c->rangebs[proc+1]) proc++;
768         w4[proc]++;
769         row2proc_i[j] = proc; /* map row index to proc */
770       }
771       for (j=0; j<size; j++) {
772         if (w4[j]) { w1[j] += w4[j];  w3[j]++; w4[j] = 0;}
773       }
774     }
775 
776     nrqs     = 0;              /* no of outgoing messages */
777     msz      = 0;              /* total mesg length (for all procs) */
778     w1[rank] = 0;              /* no mesg sent to self */
779     w3[rank] = 0;
780     for (i=0; i<size; i++) {
781       if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
782     }
783     ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr); /*(proc -array)*/
784     for (i=0,j=0; i<size; i++) {
785       if (w1[i]) { pa[j] = i; j++; }
786     }
787 
788     /* Each message would have a header = 1 + 2*(no of IS) + data */
789     for (i=0; i<nrqs; i++) {
790       j      = pa[i];
791       w1[j] += w2[j] + 2* w3[j];
792       msz   += w1[j];
793     }
794     ierr = PetscInfo2(0,"Number of outgoing messages %D Total message length %D\n",nrqs,msz);CHKERRQ(ierr);
795 
796     /* Determine the number of messages to expect, their lengths, from from-ids */
797     ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
798     ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
799 
800     /* Now post the Irecvs corresponding to these messages */
801     tag0 = ((PetscObject)C)->tag;
802     ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
803 
804     ierr = PetscFree(onodes1);CHKERRQ(ierr);
805     ierr = PetscFree(olengths1);CHKERRQ(ierr);
806 
807     /* Allocate Memory for outgoing messages */
808     ierr = PetscMalloc4(size,&sbuf1,size,&ptr,2*msz,&tmp,size,&ctr);CHKERRQ(ierr);
809     ierr = PetscMemzero(sbuf1,size*sizeof(PetscInt*));CHKERRQ(ierr);
810     ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
811 
812     {
813       PetscInt *iptr = tmp;
814       k    = 0;
815       for (i=0; i<nrqs; i++) {
816         j        = pa[i];
817         iptr    += k;
818         sbuf1[j] = iptr;
819         k        = w1[j];
820       }
821     }
822 
823     /* Form the outgoing messages. Initialize the header space */
824     for (i=0; i<nrqs; i++) {
825       j           = pa[i];
826       sbuf1[j][0] = 0;
827       ierr        = PetscMemzero(sbuf1[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
828       ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
829     }
830 
831     /* Parse the isrow and copy data into outbuf */
832     for (i=0; i<ismax; i++) {
833       row2proc_i = row2proc[i];
834       ierr   = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
835       irow_i = irow[i];
836       jmax   = nrow[i];
837       for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
838         proc = row2proc_i[j];
839         if (allrows[i]) row = j;
840         else row = irow_i[j];
841 
842         if (proc != rank) { /* copy to the outgoing buf*/
843           ctr[proc]++;
844           *ptr[proc] = row;
845           ptr[proc]++;
846         }
847       }
848       /* Update the headers for the current IS */
849       for (j=0; j<size; j++) { /* Can Optimise this loop too */
850         if ((ctr_j = ctr[j])) {
851           sbuf1_j        = sbuf1[j];
852           k              = ++sbuf1_j[0];
853           sbuf1_j[2*k]   = ctr_j;
854           sbuf1_j[2*k-1] = i;
855         }
856       }
857     }
858 
859     /* Now post the sends */
860     ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
861     for (i=0; i<nrqs; ++i) {
862       j    = pa[i];
863       ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRQ(ierr);
864     }
865 
866     /* Post Receives to capture the buffer size */
867     ierr = PetscMalloc1(nrqs+1,&r_waits2);CHKERRQ(ierr);
868     ierr = PetscMalloc3(nrqs+1,&req_source2,nrqs+1,&rbuf2,nrqs+1,&rbuf3);CHKERRQ(ierr);
869     rbuf2[0] = tmp + msz;
870     for (i=1; i<nrqs; ++i) {
871       rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
872     }
873     for (i=0; i<nrqs; ++i) {
874       j    = pa[i];
875       ierr = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag2,comm,r_waits2+i);CHKERRQ(ierr);
876     }
877 
878     /* Send to other procs the buf size they should allocate */
879     /* Receive messages*/
880     ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
881     ierr = PetscMalloc1(nrqr+1,&r_status1);CHKERRQ(ierr);
882     ierr = PetscMalloc3(nrqr,&sbuf2,nrqr,&req_size,nrqr,&req_source1);CHKERRQ(ierr);
883 
884     ierr = MPI_Waitall(nrqr,r_waits1,r_status1);CHKERRQ(ierr);
885     for (i=0; i<nrqr; ++i) {
886       req_size[i] = 0;
887       rbuf1_i        = rbuf1[i];
888       start          = 2*rbuf1_i[0] + 1;
889       ierr           = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRQ(ierr);
890       ierr           = PetscMalloc1(end+1,&sbuf2[i]);CHKERRQ(ierr);
891       sbuf2_i        = sbuf2[i];
892       for (j=start; j<end; j++) {
893         row             = rbuf1_i[j] - rstart;
894         ncols           = a_i[row+1] - a_i[row] + b_i[row+1] - b_i[row];
895         sbuf2_i[j]      = ncols;
896         req_size[i] += ncols;
897       }
898       req_source1[i] = r_status1[i].MPI_SOURCE;
899       /* form the header */
900       sbuf2_i[0] = req_size[i];
901       for (j=1; j<start; j++) sbuf2_i[j] = rbuf1_i[j];
902 
903       ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source1[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
904     }
905 
906     ierr = PetscFree(r_status1);CHKERRQ(ierr);
907     ierr = PetscFree(r_waits1);CHKERRQ(ierr);
908     ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
909 
910     /* Receive messages*/
911     ierr = PetscMalloc1(nrqs+1,&r_waits3);CHKERRQ(ierr);
912     ierr = PetscMalloc1(nrqs+1,&r_status2);CHKERRQ(ierr);
913 
914     ierr = MPI_Waitall(nrqs,r_waits2,r_status2);CHKERRQ(ierr);
915     for (i=0; i<nrqs; ++i) {
916       ierr = PetscMalloc1(rbuf2[i][0]+1,&rbuf3[i]);CHKERRQ(ierr);
917       req_source2[i] = r_status2[i].MPI_SOURCE;
918       ierr = MPI_Irecv(rbuf3[i],rbuf2[i][0],MPIU_INT,req_source2[i],tag3,comm,r_waits3+i);CHKERRQ(ierr);
919     }
920     ierr = PetscFree(r_status2);CHKERRQ(ierr);
921     ierr = PetscFree(r_waits2);CHKERRQ(ierr);
922 
923     /* Wait on sends1 and sends2 */
924     ierr = PetscMalloc1(nrqs+1,&s_status1);CHKERRQ(ierr);
925     ierr = PetscMalloc1(nrqr+1,&s_status2);CHKERRQ(ierr);
926 
927     if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRQ(ierr);}
928     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRQ(ierr);}
929     ierr = PetscFree(s_status1);CHKERRQ(ierr);
930     ierr = PetscFree(s_status2);CHKERRQ(ierr);
931     ierr = PetscFree(s_waits1);CHKERRQ(ierr);
932     ierr = PetscFree(s_waits2);CHKERRQ(ierr);
933 
934     /* Now allocate sending buffers for a->j, and send them off */
935     ierr = PetscMalloc1(nrqr+1,&sbuf_aj);CHKERRQ(ierr);
936     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
937     ierr = PetscMalloc1(j+1,&sbuf_aj[0]);CHKERRQ(ierr);
938     for (i=1; i<nrqr; i++) sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
939 
940     ierr = PetscMalloc1(nrqr+1,&s_waits3);CHKERRQ(ierr);
941     {
942 
943       for (i=0; i<nrqr; i++) {
944         rbuf1_i   = rbuf1[i];
945         sbuf_aj_i = sbuf_aj[i];
946         ct1       = 2*rbuf1_i[0] + 1;
947         ct2       = 0;
948         for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
949           kmax = rbuf1[i][2*j];
950           for (k=0; k<kmax; k++,ct1++) {
951             row    = rbuf1_i[ct1] - rstart;
952             nzA    = a_i[row+1] - a_i[row]; nzB = b_i[row+1] - b_i[row];
953             ncols  = nzA + nzB;
954             cworkA = a_j + a_i[row]; cworkB = b_j + b_i[row];
955 
956             /* load the column indices for this row into cols */
957             cols = sbuf_aj_i + ct2;
958             for (l=0; l<nzB; l++) {
959               if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
960               else break;
961             }
962             imark = l;
963             for (l=0; l<nzA; l++) {cols[imark+l] = cstart + cworkA[l];}
964             for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
965             ct2 += ncols;
966           }
967         }
968         ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source1[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
969       }
970     }
971     ierr = PetscMalloc2(nrqs+1,&r_status3,nrqr+1,&s_status3);CHKERRQ(ierr);
972 
973     /* create col map: global col of C -> local col of submatrices */
974 #if defined(PETSC_USE_CTABLE)
975     for (i=0; i<ismax; i++) {
976       if (!allcolumns[i]) {
977         ierr = PetscTableCreate(ncol[i]+1,c->Nbs+1,&cmap[i]);CHKERRQ(ierr);
978 
979         jmax   = ncol[i];
980         icol_i = icol[i];
981         cmap_i = cmap[i];
982         for (j=0; j<jmax; j++) {
983           ierr = PetscTableAdd(cmap[i],icol_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
984         }
985       } else cmap[i] = NULL;
986     }
987 #else
988     for (i=0; i<ismax; i++) {
989       if (!allcolumns[i]) {
990         ierr   = PetscCalloc1(c->Nbs,&cmap[i]);CHKERRQ(ierr);
991         jmax   = ncol[i];
992         icol_i = icol[i];
993         cmap_i = cmap[i];
994         for (j=0; j<jmax; j++) cmap_i[icol_i[j]] = j+1;
995       } else cmap[i] = NULL;
996     }
997 #endif
998 
999     /* Create lens which is required for MatCreate... */
1000     for (i=0,j=0; i<ismax; i++) j += nrow[i];
1001     ierr = PetscMalloc1(ismax,&lens);CHKERRQ(ierr);
1002 
1003     if (ismax) {
1004       ierr = PetscCalloc1(j,&lens[0]);CHKERRQ(ierr);
1005     }
1006     for (i=1; i<ismax; i++) lens[i] = lens[i-1] + nrow[i-1];
1007 
1008     /* Update lens from local data */
1009     for (i=0; i<ismax; i++) {
1010       row2proc_i = row2proc[i];
1011       jmax = nrow[i];
1012       if (!allcolumns[i]) cmap_i = cmap[i];
1013       irow_i = irow[i];
1014       lens_i = lens[i];
1015       for (j=0; j<jmax; j++) {
1016         if (allrows[i]) row = j;
1017         else row = irow_i[j]; /* global blocked row of C */
1018 
1019         proc = row2proc_i[j];
1020         if (proc == rank) {
1021           /* Get indices from matA and then from matB */
1022 #if defined(PETSC_USE_CTABLE)
1023           PetscInt   tt;
1024 #endif
1025           row    = row - rstart;
1026           nzA    = a_i[row+1] - a_i[row];
1027           nzB    = b_i[row+1] - b_i[row];
1028           cworkA =  a_j + a_i[row];
1029           cworkB = b_j + b_i[row];
1030 
1031           if (!allcolumns[i]) {
1032 #if defined(PETSC_USE_CTABLE)
1033             for (k=0; k<nzA; k++) {
1034               ierr = PetscTableFind(cmap_i,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1035               if (tt) lens_i[j]++;
1036             }
1037             for (k=0; k<nzB; k++) {
1038               ierr = PetscTableFind(cmap_i,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1039               if (tt) lens_i[j]++;
1040             }
1041 
1042 #else
1043             for (k=0; k<nzA; k++) {
1044               if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1045             }
1046             for (k=0; k<nzB; k++) {
1047               if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1048             }
1049 #endif
1050           } else { /* allcolumns */
1051             lens_i[j] = nzA + nzB;
1052           }
1053         }
1054       }
1055     }
1056 
1057     /* Create row map: global row of C -> local row of submatrices */
1058     for (i=0; i<ismax; i++) {
1059       if (!allrows[i]) {
1060 #if defined(PETSC_USE_CTABLE)
1061         ierr   = PetscTableCreate(nrow[i]+1,c->Mbs+1,&rmap[i]);CHKERRQ(ierr);
1062         irow_i = irow[i];
1063         jmax   = nrow[i];
1064         for (j=0; j<jmax; j++) {
1065           if (allrows[i]) {
1066             ierr = PetscTableAdd(rmap[i],j+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1067           } else {
1068             ierr = PetscTableAdd(rmap[i],irow_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1069           }
1070         }
1071 #else
1072         ierr   = PetscCalloc1(c->Mbs,&rmap[i]);CHKERRQ(ierr);
1073         rmap_i = rmap[i];
1074         irow_i = irow[i];
1075         jmax   = nrow[i];
1076         for (j=0; j<jmax; j++) {
1077           if (allrows[i]) rmap_i[j] = j;
1078           else rmap_i[irow_i[j]] = j;
1079         }
1080 #endif
1081       } else rmap[i] = NULL;
1082     }
1083 
1084     /* Update lens from offproc data */
1085     {
1086       PetscInt *rbuf2_i,*rbuf3_i,*sbuf1_i;
1087 
1088       ierr    = MPI_Waitall(nrqs,r_waits3,r_status3);CHKERRQ(ierr);
1089       for (tmp2=0; tmp2<nrqs; tmp2++) {
1090         sbuf1_i = sbuf1[pa[tmp2]];
1091         jmax    = sbuf1_i[0];
1092         ct1     = 2*jmax+1;
1093         ct2     = 0;
1094         rbuf2_i = rbuf2[tmp2];
1095         rbuf3_i = rbuf3[tmp2];
1096         for (j=1; j<=jmax; j++) {
1097           is_no  = sbuf1_i[2*j-1];
1098           max1   = sbuf1_i[2*j];
1099           lens_i = lens[is_no];
1100           if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1101           rmap_i = rmap[is_no];
1102           for (k=0; k<max1; k++,ct1++) {
1103             if (allrows[is_no]) {
1104               row = sbuf1_i[ct1];
1105             } else {
1106 #if defined(PETSC_USE_CTABLE)
1107               ierr = PetscTableFind(rmap_i,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
1108               row--;
1109               if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1110 #else
1111               row = rmap_i[sbuf1_i[ct1]]; /* the val in the new matrix to be */
1112 #endif
1113             }
1114             max2 = rbuf2_i[ct1];
1115             for (l=0; l<max2; l++,ct2++) {
1116               if (!allcolumns[is_no]) {
1117 #if defined(PETSC_USE_CTABLE)
1118                 ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1119 #else
1120                 tcol = cmap_i[rbuf3_i[ct2]];
1121 #endif
1122                 if (tcol) lens_i[row]++;
1123               } else { /* allcolumns */
1124                 lens_i[row]++; /* lens_i[row] += max2 ? */
1125               }
1126             }
1127           }
1128         }
1129       }
1130     }
1131     ierr = PetscFree(r_waits3);CHKERRQ(ierr);
1132     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRQ(ierr);}
1133     ierr = PetscFree2(r_status3,s_status3);CHKERRQ(ierr);
1134     ierr = PetscFree(s_waits3);CHKERRQ(ierr);
1135 
1136     /* Create the submatrices */
1137     for (i=0; i<ismax; i++) {
1138       PetscInt bs_tmp;
1139       if (ijonly) bs_tmp = 1;
1140       else        bs_tmp = bs;
1141 
1142       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
1143       ierr = MatSetSizes(submats[i],nrow[i]*bs_tmp,ncol[i]*bs_tmp,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1144 
1145       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
1146       ierr = MatSeqBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr);
1147       ierr = MatSeqSBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr); /* this subroutine is used by SBAIJ routines */
1148 
1149       /* create struct Mat_SubSppt and attach it to submat */
1150       ierr = PetscNew(&smat_i);CHKERRQ(ierr);
1151       subc = (Mat_SeqBAIJ*)submats[i]->data;
1152       subc->submatis1 = smat_i;
1153 
1154       smat_i->destroy          = submats[i]->ops->destroy;
1155       submats[i]->ops->destroy = MatDestroy_SeqBAIJ_Submatrices;
1156       submats[i]->factortype   = C->factortype;
1157 
1158       smat_i->id          = i;
1159       smat_i->nrqs        = nrqs;
1160       smat_i->nrqr        = nrqr;
1161       smat_i->rbuf1       = rbuf1;
1162       smat_i->rbuf2       = rbuf2;
1163       smat_i->rbuf3       = rbuf3;
1164       smat_i->sbuf2       = sbuf2;
1165       smat_i->req_source2 = req_source2;
1166 
1167       smat_i->sbuf1       = sbuf1;
1168       smat_i->ptr         = ptr;
1169       smat_i->tmp         = tmp;
1170       smat_i->ctr         = ctr;
1171 
1172       smat_i->pa           = pa;
1173       smat_i->req_size     = req_size;
1174       smat_i->req_source1  = req_source1;
1175 
1176       smat_i->allcolumns  = allcolumns[i];
1177       smat_i->allrows     = allrows[i];
1178       smat_i->singleis    = PETSC_FALSE;
1179       smat_i->row2proc    = row2proc[i];
1180       smat_i->rmap        = rmap[i];
1181       smat_i->cmap        = cmap[i];
1182     }
1183 
1184     if (!ismax) { /* Create dummy submats[0] for reuse struct subc */
1185       ierr = MatCreate(PETSC_COMM_SELF,&submats[0]);CHKERRQ(ierr);
1186       ierr = MatSetSizes(submats[0],0,0,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1187       ierr = MatSetType(submats[0],MATDUMMY);CHKERRQ(ierr);
1188 
1189       /* create struct Mat_SubSppt and attach it to submat */
1190       ierr = PetscNewLog(submats[0],&smat_i);CHKERRQ(ierr);
1191       submats[0]->data = (void*)smat_i;
1192 
1193       smat_i->destroy          = submats[0]->ops->destroy;
1194       submats[0]->ops->destroy = MatDestroy_Dummy_Submatrices;
1195       submats[0]->factortype   = C->factortype;
1196 
1197       smat_i->id          = 0;
1198       smat_i->nrqs        = nrqs;
1199       smat_i->nrqr        = nrqr;
1200       smat_i->rbuf1       = rbuf1;
1201       smat_i->rbuf2       = rbuf2;
1202       smat_i->rbuf3       = rbuf3;
1203       smat_i->sbuf2       = sbuf2;
1204       smat_i->req_source2 = req_source2;
1205 
1206       smat_i->sbuf1       = sbuf1;
1207       smat_i->ptr         = ptr;
1208       smat_i->tmp         = tmp;
1209       smat_i->ctr         = ctr;
1210 
1211       smat_i->pa           = pa;
1212       smat_i->req_size     = req_size;
1213       smat_i->req_source1  = req_source1;
1214 
1215       smat_i->allcolumns  = PETSC_FALSE;
1216       smat_i->singleis    = PETSC_FALSE;
1217       smat_i->row2proc    = NULL;
1218       smat_i->rmap        = NULL;
1219       smat_i->cmap        = NULL;
1220     }
1221 
1222 
1223     if (ismax) {ierr = PetscFree(lens[0]);CHKERRQ(ierr);}
1224     ierr = PetscFree(lens);CHKERRQ(ierr);
1225     ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
1226     ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
1227 
1228   } /* endof scall == MAT_INITIAL_MATRIX */
1229 
1230   /* Post recv matrix values */
1231   if (!ijonly) {
1232     ierr = PetscObjectGetNewTag((PetscObject)C,&tag4);CHKERRQ(ierr);
1233     ierr = PetscMalloc1(nrqs+1,&rbuf4);CHKERRQ(ierr);
1234     ierr = PetscMalloc1(nrqs+1,&r_waits4);CHKERRQ(ierr);
1235     ierr = PetscMalloc1(nrqs+1,&r_status4);CHKERRQ(ierr);
1236     ierr = PetscMalloc1(nrqr+1,&s_status4);CHKERRQ(ierr);
1237     for (i=0; i<nrqs; ++i) {
1238       ierr = PetscMalloc1(rbuf2[i][0]*bs2,&rbuf4[i]);CHKERRQ(ierr);
1239       ierr = MPI_Irecv(rbuf4[i],rbuf2[i][0]*bs2,MPIU_SCALAR,req_source2[i],tag4,comm,r_waits4+i);CHKERRQ(ierr);
1240     }
1241 
1242     /* Allocate sending buffers for a->a, and send them off */
1243     ierr = PetscMalloc1(nrqr+1,&sbuf_aa);CHKERRQ(ierr);
1244     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
1245 
1246     ierr = PetscMalloc1((j+1)*bs2,&sbuf_aa[0]);CHKERRQ(ierr);
1247     for (i=1; i<nrqr; i++) sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
1248 
1249     ierr = PetscMalloc1(nrqr+1,&s_waits4);CHKERRQ(ierr);
1250 
1251     for (i=0; i<nrqr; i++) {
1252       rbuf1_i   = rbuf1[i];
1253       sbuf_aa_i = sbuf_aa[i];
1254       ct1       = 2*rbuf1_i[0]+1;
1255       ct2       = 0;
1256       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
1257         kmax = rbuf1_i[2*j];
1258         for (k=0; k<kmax; k++,ct1++) {
1259           row    = rbuf1_i[ct1] - rstart;
1260           nzA    = a_i[row+1] - a_i[row];
1261           nzB    = b_i[row+1] - b_i[row];
1262           ncols  = nzA + nzB;
1263           cworkB = b_j + b_i[row];
1264           vworkA = a_a + a_i[row]*bs2;
1265           vworkB = b_a + b_i[row]*bs2;
1266 
1267           /* load the column values for this row into vals*/
1268           vals = sbuf_aa_i+ct2*bs2;
1269           for (l=0; l<nzB; l++) {
1270             if ((bmap[cworkB[l]]) < cstart) {
1271               ierr = PetscMemcpy(vals+l*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1272             } else break;
1273           }
1274           imark = l;
1275           for (l=0; l<nzA; l++) {
1276             ierr = PetscMemcpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1277           }
1278           for (l=imark; l<nzB; l++) {
1279             ierr = PetscMemcpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1280           }
1281 
1282           ct2 += ncols;
1283         }
1284       }
1285       ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_SCALAR,req_source1[i],tag4,comm,s_waits4+i);CHKERRQ(ierr);
1286     }
1287   }
1288 
1289   /* Assemble the matrices */
1290   /* First assemble the local rows */
1291   for (i=0; i<ismax; i++) {
1292     row2proc_i = row2proc[i];
1293     subc      = (Mat_SeqBAIJ*)submats[i]->data;
1294     imat_ilen = subc->ilen;
1295     imat_j    = subc->j;
1296     imat_i    = subc->i;
1297     imat_a    = subc->a;
1298 
1299     if (!allcolumns[i]) cmap_i = cmap[i];
1300     rmap_i = rmap[i];
1301     irow_i = irow[i];
1302     jmax   = nrow[i];
1303     for (j=0; j<jmax; j++) {
1304       if (allrows[i]) row = j;
1305       else row  = irow_i[j];
1306       proc = row2proc_i[j];
1307 
1308       if (proc == rank) {
1309 
1310         row    = row - rstart;
1311         nzA    = a_i[row+1] - a_i[row];
1312         nzB    = b_i[row+1] - b_i[row];
1313         cworkA = a_j + a_i[row];
1314         cworkB = b_j + b_i[row];
1315         if (!ijonly) {
1316           vworkA = a_a + a_i[row]*bs2;
1317           vworkB = b_a + b_i[row]*bs2;
1318         }
1319 
1320         if (allrows[i]) {
1321           row = row+rstart;
1322         } else {
1323 #if defined(PETSC_USE_CTABLE)
1324           ierr = PetscTableFind(rmap_i,row+rstart+1,&row);CHKERRQ(ierr);
1325           row--;
1326 
1327           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1328 #else
1329           row = rmap_i[row + rstart];
1330 #endif
1331         }
1332         mat_i = imat_i[row];
1333         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1334         mat_j    = imat_j + mat_i;
1335         ilen = imat_ilen[row];
1336 
1337         /* load the column indices for this row into cols*/
1338         if (!allcolumns[i]) {
1339           for (l=0; l<nzB; l++) {
1340             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1341 #if defined(PETSC_USE_CTABLE)
1342               ierr = PetscTableFind(cmap_i,ctmp+1,&tcol);CHKERRQ(ierr);
1343               if (tcol) {
1344 #else
1345               if ((tcol = cmap_i[ctmp])) {
1346 #endif
1347                 *mat_j++ = tcol - 1;
1348                 ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1349                 mat_a   += bs2;
1350                 ilen++;
1351               }
1352             } else break;
1353           }
1354           imark = l;
1355           for (l=0; l<nzA; l++) {
1356 #if defined(PETSC_USE_CTABLE)
1357             ierr = PetscTableFind(cmap_i,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
1358             if (tcol) {
1359 #else
1360             if ((tcol = cmap_i[cstart + cworkA[l]])) {
1361 #endif
1362               *mat_j++ = tcol - 1;
1363               if (!ijonly) {
1364                 ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1365                 mat_a += bs2;
1366               }
1367               ilen++;
1368             }
1369           }
1370           for (l=imark; l<nzB; l++) {
1371 #if defined(PETSC_USE_CTABLE)
1372             ierr = PetscTableFind(cmap_i,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
1373             if (tcol) {
1374 #else
1375             if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1376 #endif
1377               *mat_j++ = tcol - 1;
1378               if (!ijonly) {
1379                 ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1380                 mat_a += bs2;
1381               }
1382               ilen++;
1383             }
1384           }
1385         } else { /* allcolumns */
1386           for (l=0; l<nzB; l++) {
1387             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1388               *mat_j++ = ctmp;
1389               ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1390               mat_a   += bs2;
1391               ilen++;
1392             } else break;
1393           }
1394           imark = l;
1395           for (l=0; l<nzA; l++) {
1396             *mat_j++ = cstart+cworkA[l];
1397             if (!ijonly) {
1398               ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1399               mat_a += bs2;
1400             }
1401             ilen++;
1402           }
1403           for (l=imark; l<nzB; l++) {
1404             *mat_j++ = bmap[cworkB[l]];
1405             if (!ijonly) {
1406               ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1407               mat_a += bs2;
1408             }
1409             ilen++;
1410           }
1411         }
1412         imat_ilen[row] = ilen;
1413       }
1414     }
1415   }
1416 
1417   /* Now assemble the off proc rows */
1418   if (!ijonly) {
1419     ierr = MPI_Waitall(nrqs,r_waits4,r_status4);CHKERRQ(ierr);
1420   }
1421   for (tmp2=0; tmp2<nrqs; tmp2++) {
1422     sbuf1_i = sbuf1[pa[tmp2]];
1423     jmax    = sbuf1_i[0];
1424     ct1     = 2*jmax + 1;
1425     ct2     = 0;
1426     rbuf2_i = rbuf2[tmp2];
1427     rbuf3_i = rbuf3[tmp2];
1428     if (!ijonly) rbuf4_i = rbuf4[tmp2];
1429     for (j=1; j<=jmax; j++) {
1430       is_no     = sbuf1_i[2*j-1];
1431       rmap_i    = rmap[is_no];
1432       if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1433       subc      = (Mat_SeqBAIJ*)submats[is_no]->data;
1434       imat_ilen = subc->ilen;
1435       imat_j    = subc->j;
1436       imat_i    = subc->i;
1437       if (!ijonly) imat_a    = subc->a;
1438       max1      = sbuf1_i[2*j];
1439       for (k=0; k<max1; k++,ct1++) { /* for each recved block row */
1440         row = sbuf1_i[ct1];
1441 
1442         if (allrows[is_no]) {
1443           row = sbuf1_i[ct1];
1444         } else {
1445 #if defined(PETSC_USE_CTABLE)
1446           ierr = PetscTableFind(rmap_i,row+1,&row);CHKERRQ(ierr);
1447           row--;
1448           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1449 #else
1450           row = rmap_i[row];
1451 #endif
1452         }
1453         ilen  = imat_ilen[row];
1454         mat_i = imat_i[row];
1455         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1456         mat_j = imat_j + mat_i;
1457         max2  = rbuf2_i[ct1];
1458         if (!allcolumns[is_no]) {
1459           for (l=0; l<max2; l++,ct2++) {
1460 #if defined(PETSC_USE_CTABLE)
1461             ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1462 #else
1463             tcol = cmap_i[rbuf3_i[ct2]];
1464 #endif
1465             if (tcol) {
1466               *mat_j++ = tcol - 1;
1467               if (!ijonly) {
1468                 ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1469                 mat_a += bs2;
1470               }
1471               ilen++;
1472             }
1473           }
1474         } else { /* allcolumns */
1475           for (l=0; l<max2; l++,ct2++) {
1476             *mat_j++ = rbuf3_i[ct2]; /* same global column index of C */
1477             if (!ijonly) {
1478               ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1479               mat_a += bs2;
1480             }
1481             ilen++;
1482           }
1483         }
1484         imat_ilen[row] = ilen;
1485       }
1486     }
1487   }
1488 
1489   if (!iscsorted) { /* sort column indices of the rows */
1490     MatScalar *work;
1491 
1492     ierr = PetscMalloc1(bs2,&work);CHKERRQ(ierr);
1493     for (i=0; i<ismax; i++) {
1494       subc      = (Mat_SeqBAIJ*)submats[i]->data;
1495       imat_ilen = subc->ilen;
1496       imat_j    = subc->j;
1497       imat_i    = subc->i;
1498       if (!ijonly) imat_a = subc->a;
1499       if (allcolumns[i]) continue;
1500 
1501       jmax = nrow[i];
1502       for (j=0; j<jmax; j++) {
1503         mat_i = imat_i[j];
1504         mat_j = imat_j + mat_i;
1505         ilen  = imat_ilen[j];
1506         if (ijonly) {
1507           ierr = PetscSortInt(ilen,mat_j);CHKERRQ(ierr);
1508         } else {
1509           mat_a = imat_a + mat_i*bs2;
1510           ierr = PetscSortIntWithDataArray(ilen,mat_j,mat_a,bs2*sizeof(MatScalar),work);CHKERRQ(ierr);
1511         }
1512       }
1513     }
1514     ierr = PetscFree(work);CHKERRQ(ierr);
1515   }
1516 
1517   if (!ijonly) {
1518     ierr = PetscFree(r_status4);CHKERRQ(ierr);
1519     ierr = PetscFree(r_waits4);CHKERRQ(ierr);
1520     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRQ(ierr);}
1521     ierr = PetscFree(s_waits4);CHKERRQ(ierr);
1522     ierr = PetscFree(s_status4);CHKERRQ(ierr);
1523   }
1524 
1525   /* Restore the indices */
1526   for (i=0; i<ismax; i++) {
1527     if (!allrows[i]) {
1528       ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
1529     }
1530     if (!allcolumns[i]) {
1531       ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
1532     }
1533   }
1534 
1535   for (i=0; i<ismax; i++) {
1536     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1537     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1538   }
1539 
1540   /* Destroy allocated memory */
1541   ierr = PetscFree5(irow,icol,nrow,ncol,issorted);CHKERRQ(ierr);
1542   ierr = PetscFree5(row2proc,cmap,rmap,allcolumns,allrows);CHKERRQ(ierr);
1543 
1544   if (!ijonly) {
1545     ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
1546     ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
1547 
1548     for (i=0; i<nrqs; ++i) {
1549       ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);
1550     }
1551     ierr = PetscFree(rbuf4);CHKERRQ(ierr);
1552   }
1553   c->ijonly = PETSC_FALSE; /* set back to the default */
1554   PetscFunctionReturn(0);
1555 }
1556