xref: /petsc/src/mat/impls/aij/mpi/mpiptap.c (revision e5afcf2835ad2df3c79a70d4d9a0fbb86e97247e)
1 
2 /*
3   Defines projective product routines where A is an MPIAIJ matrix
4           C = P^T * A * P
5 */
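/*
  Minimal usage sketch (not part of this implementation; the variable names A, P, C and the
  fill value 2.0 are illustrative only):

    Mat A,P,C;
    ...                                                            // assemble A (n x n) and P (n x m) as MATMPIAIJ
    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);   // symbolic + numeric product
    ...                                                            // change numerical values of A, same nonzero pattern
    ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);     // reuse the symbolic data attached to C
    ierr = MatDestroy(&C);CHKERRQ(ierr);

  The algorithm used below can be selected at runtime with
    -matptap_via <scalable,nonscalable,allatonce,allatonce_merged>
  (plus "hypre" when PETSc is configured with hypre); see MatPtAP_MPIAIJ_MPIAIJ() below.
*/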
6 
7 #include <../src/mat/impls/aij/seq/aij.h>   /*I "petscmat.h" I*/
8 #include <../src/mat/utils/freespace.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <petscbt.h>
11 #include <petsctime.h>
12 #include <petsc/private/hashmapiv.h>
13 #include <petsc/private/hashseti.h>
14 #include <petscsf.h>
15 
16 
17 PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
18 {
19   PetscErrorCode    ierr;
20   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
21   Mat_APMPI         *ptap=a->ap;
22   PetscBool         iascii;
23   PetscViewerFormat format;
24 
25   PetscFunctionBegin;
26   if (!ptap) {
27     /* hack: MatDuplicate() sets oldmat->ops->view on newmat, which is a base Mat class with a NULL ptap! */
28     A->ops->view = MatView_MPIAIJ;
29     ierr = (A->ops->view)(A,viewer);CHKERRQ(ierr);
30     PetscFunctionReturn(0);
31   }
32 
33   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
34   if (iascii) {
35     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
36     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
37       if (ptap->algType == 0) {
38         ierr = PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");CHKERRQ(ierr);
39       } else if (ptap->algType == 1) {
40         ierr = PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");CHKERRQ(ierr);
41       } else if (ptap->algType == 2) {
42         ierr = PetscViewerASCIIPrintf(viewer,"using allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
43       } else if (ptap->algType == 3) {
44         ierr = PetscViewerASCIIPrintf(viewer,"using merged allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
45       }
46     }
47   }
48   ierr = (ptap->view)(A,viewer);CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
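/*
  Sketch of how the report above can be obtained (the viewer usage is illustrative; C is assumed
  to be a product created by MatPtAP()):

    ierr = PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
    ierr = MatView(C,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);   // prints which MatPtAP() algorithm was used
    ierr = PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  or, from the command line, with -mat_view ::ascii_info.
*/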
51 
52 PetscErrorCode MatFreeIntermediateDataStructures_MPIAIJ_AP(Mat A)
53 {
54   PetscErrorCode      ierr;
55   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data;
56   Mat_APMPI           *ptap=a->ap;
57   Mat_Merge_SeqsToMPI *merge;
58 
59   PetscFunctionBegin;
60   if (!ptap) PetscFunctionReturn(0);
61 
62   ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
63   ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
64   ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
65   ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
66   ierr = MatDestroy(&ptap->A_loc);CHKERRQ(ierr); /* used by MatTransposeMatMult() */
67   ierr = MatDestroy(&ptap->Rd);CHKERRQ(ierr);
68   ierr = MatDestroy(&ptap->Ro);CHKERRQ(ierr);
69   if (ptap->AP_loc) { /* used by alg_rap */
70     Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
71     ierr = PetscFree(ap->i);CHKERRQ(ierr);
72     ierr = PetscFree2(ap->j,ap->a);CHKERRQ(ierr);
73     ierr = MatDestroy(&ptap->AP_loc);CHKERRQ(ierr);
74   } else { /* used by alg_ptap */
75     ierr = PetscFree(ptap->api);CHKERRQ(ierr);
76     ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
77   }
78   ierr = MatDestroy(&ptap->C_loc);CHKERRQ(ierr);
79   ierr = MatDestroy(&ptap->C_oth);CHKERRQ(ierr);
80   if (ptap->apa) {ierr = PetscFree(ptap->apa);CHKERRQ(ierr);}
81 
82   ierr = MatDestroy(&ptap->Pt);CHKERRQ(ierr);
83 
84   merge=ptap->merge;
85   if (merge) { /* used by alg_ptap */
86     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
87     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
88     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
89     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
90     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
91     ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
92     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
93     ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
94     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
95     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
96     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
97     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
98     ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
99     ierr = PetscFree(ptap->merge);CHKERRQ(ierr);
100   }
101   ierr = ISLocalToGlobalMappingDestroy(&ptap->ltog);CHKERRQ(ierr);
102 
103   ierr = PetscSFDestroy(&ptap->sf);CHKERRQ(ierr);
104   ierr = PetscFree(ptap->c_othi);CHKERRQ(ierr);
105   ierr = PetscFree(ptap->c_rmti);CHKERRQ(ierr);
106   PetscFunctionReturn(0);
107 }
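/*
  Sketch of when a caller would trigger the release above (assumes C was created by MatPtAP()
  and will not be recomputed with MAT_REUSE_MATRIX):

    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);
    ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);   // reclaim the Mat_APMPI workspace

  The same effect is available through the option -mat_freeintermediatedatastructures handled
  in MatPtAP_MPIAIJ_MPIAIJ() below.
*/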
108 
109 PetscErrorCode MatDestroy_MPIAIJ_PtAP(Mat A)
110 {
111   PetscErrorCode ierr;
112   Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
113   Mat_APMPI      *ptap=a->ap;
114 
115   PetscFunctionBegin;
116   ierr = (*A->ops->freeintermediatedatastructures)(A);CHKERRQ(ierr);
117   ierr = (*ptap->destroy)(A);CHKERRQ(ierr); /* MatDestroy_MPIAIJ(A) */
118   ierr = PetscFree(ptap);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 PETSC_INTERN PetscErrorCode MatPtAP_MPIAIJ_MPIAIJ(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
123 {
124   PetscErrorCode ierr;
125   PetscBool      flg;
126   MPI_Comm       comm;
127 #if !defined(PETSC_HAVE_HYPRE)
128   const char          *algTypes[4] = {"scalable","nonscalable","allatonce","allatonce_merged"};
129   PetscInt            nalg=4;
130 #else
131   const char          *algTypes[5] = {"scalable","nonscalable","allatonce","allatonce_merged","hypre"};
132   PetscInt            nalg=5;
133 #endif
134   PetscInt            pN=P->cmap->N,alg=1; /* set default algorithm */
135 
136   PetscFunctionBegin;
137   /* check if matrix local sizes are compatible */
138   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
139   if (A->rmap->rstart != P->rmap->rstart || A->rmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Arow (%D, %D) != Prow (%D,%D)",A->rmap->rstart,A->rmap->rend,P->rmap->rstart,P->rmap->rend);
140   if (A->cmap->rstart != P->rmap->rstart || A->cmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Acol (%D, %D) != Prow (%D,%D)",A->cmap->rstart,A->cmap->rend,P->rmap->rstart,P->rmap->rend);
141 
142   if (scall == MAT_INITIAL_MATRIX) {
143     /* pick an algorithm */
144     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
145     ierr = PetscOptionsEList("-matptap_via","Algorithmic approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
146     ierr = PetscOptionsEnd();CHKERRQ(ierr);
147 
148     if (!flg && pN > 100000) { /* may switch to scalable algorithm as default */
149       MatInfo     Ainfo,Pinfo;
150       PetscInt    nz_local;
151       PetscBool   alg_scalable_loc=PETSC_FALSE,alg_scalable;
152 
153       ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
154       ierr = MatGetInfo(P,MAT_LOCAL,&Pinfo);CHKERRQ(ierr);
155       nz_local = (PetscInt)(Ainfo.nz_allocated + Pinfo.nz_allocated);
156 
157       if (pN > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
158       ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);
159 
160       if (alg_scalable) {
161         alg = 0; /* the scalable algorithm can be ~50% slower than the nonscalable one */
162       }
163     }
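    /* Worked example of the switch above (numbers are illustrative only): with fill = 2.0 and
       P having pN = 120000 global columns (> 100000), a rank where nz_allocated(A) + nz_allocated(P)
       = 50000 gives fill*nz_local = 100000 < pN, so alg_scalable_loc is PETSC_TRUE there; after the
       MPI_LOR reduction every rank switches to the scalable algorithm (alg = 0). */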
164 
165     switch (alg) {
166     case 1:
167       /* do R=P^T locally, then C=R*A*P -- nonscalable */
168       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
169       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);CHKERRQ(ierr);
170       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
171       break;
172     case 2:
173       /* compute C=P^T*A*P allatonce */
174       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
175       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(A,P,fill,C);CHKERRQ(ierr);
176       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
177       break;
178     case 3:
179       /* compute C=P^T*A*P allatonce */
180       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
181       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(A,P,fill,C);CHKERRQ(ierr);
182       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
183       break;
184 #if defined(PETSC_HAVE_HYPRE)
185     case 4:
186       /* Use boomerAMGBuildCoarseOperator */
187       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
188       ierr = MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);CHKERRQ(ierr);
189       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
190       break;
191 #endif
192     default:
193       /* do R=P^T locally, then C=R*A*P */
194       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
195       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);CHKERRQ(ierr);
196       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
197       break;
198     }
199 
200     if (alg == 0 || alg == 1 || alg == 2 || alg == 3) {
201       Mat_MPIAIJ *c  = (Mat_MPIAIJ*)(*C)->data;
202       Mat_APMPI  *ap = c->ap;
203       ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)(*C)),((PetscObject)(*C))->prefix,"MatFreeIntermediateDataStructures","Mat");CHKERRQ(ierr);
204       ap->freestruct = PETSC_FALSE;
205       ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures", "MatFreeIntermediateDataStructures",ap->freestruct,&ap->freestruct, NULL);CHKERRQ(ierr);
206       ierr = PetscOptionsEnd();CHKERRQ(ierr);
207     }
208   }
209 
210   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
211   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
212   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
213   PetscFunctionReturn(0);
214 }
215 
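/*
  Outline of the data flow below (a sketch using the local block names from this file; it adds
  no new functionality):
    on each rank   P = [Pd Po],  A = [Ad Ao]   (diagonal / off-diagonal blocks),
    Rd = Pd^T, Ro = Po^T                       (built in MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()),
    AP_loc = Ad*P_loc + Ao*P_oth               (local rows of A times all needed rows of P),
    C_loc  = Rd*AP_loc                         (contributions to locally owned rows of C),
    C_oth  = Ro*AP_loc                         (contributions to rows of C owned by other ranks),
  so C = P^T*A*P is assembled from C_loc plus the C_oth pieces added with MatSetValues(...,ADD_VALUES).
*/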
216 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
217 {
218   PetscErrorCode    ierr;
219   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
220   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
221   Mat_SeqAIJ        *ap,*p_loc,*p_oth,*c_seq;
222   Mat_APMPI         *ptap = c->ap;
223   Mat               AP_loc,C_loc,C_oth;
224   PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz,nout;
225   PetscScalar       *apa;
226   const PetscInt    *cols;
227   const PetscScalar *vals;
228 
229   PetscFunctionBegin;
230   if (!ptap) {
231     MPI_Comm comm;
232     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
233     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
234   }
235 
236   ierr = MatZeroEntries(C);CHKERRQ(ierr);
237 
238   /* 1) get R = Pd^T,Ro = Po^T */
239   if (ptap->reuse == MAT_REUSE_MATRIX) {
240     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
241     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
242   }
243 
244   /* 2) get AP_loc */
245   AP_loc = ptap->AP_loc;
246   ap = (Mat_SeqAIJ*)AP_loc->data;
247 
248   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
249   /*-----------------------------------------------------*/
250   if (ptap->reuse == MAT_REUSE_MATRIX) {
251     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
252     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
253     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
254   }
255 
256   /* 2-2) compute numeric A_loc*P - dominating part */
257   /* ---------------------------------------------- */
258   /* get data from symbolic products */
259   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
260   if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
261   else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ptap->P_oth is NULL. Cannot proceed!");
262 
263   api   = ap->i;
264   apj   = ap->j;
265   ierr = ISLocalToGlobalMappingApply(ptap->ltog,api[AP_loc->rmap->n],apj,apj);CHKERRQ(ierr);
266   for (i=0; i<am; i++) {
267     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
268     apnz = api[i+1] - api[i];
269     apa = ap->a + api[i];
270     ierr = PetscMemzero(apa,sizeof(PetscScalar)*apnz);CHKERRQ(ierr);
271     AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
272     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
273   }
274   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,api[AP_loc->rmap->n],apj,&nout,apj);CHKERRQ(ierr);
275   if (api[AP_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",api[AP_loc->rmap->n],nout);
276 
277   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
278   /* Always use scalable version since we are in the MPI scalable version */
279   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
280   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
281 
282   C_loc = ptap->C_loc;
283   C_oth = ptap->C_oth;
284 
285   /* add C_loc and C_oth to C */
286   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
287 
288   /* C_loc -> C */
289   cm    = C_loc->rmap->N;
290   c_seq = (Mat_SeqAIJ*)C_loc->data;
291   cols = c_seq->j;
292   vals = c_seq->a;
293   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_loc->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
294 
295   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
296   /* when there are no off-processor parts.  */
297   /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
298   /* no longer holds. In that case the more general MatSetValues_MPIAIJ() has to be used, which looks up the column index in a table */
299   /* and performs additional bookkeeping. */
300   if (C->assembled) {
301     C->was_assembled = PETSC_TRUE;
302     C->assembled     = PETSC_FALSE;
303   }
304   if (C->was_assembled) {
305     for (i=0; i<cm; i++) {
306       ncols = c_seq->i[i+1] - c_seq->i[i];
307       row = rstart + i;
308       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
309       cols += ncols; vals += ncols;
310     }
311   } else {
312     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
313   }
314   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_loc->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
315   if (c_seq->i[C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);
316 
317   /* Co -> C, off-processor part */
318   cm = C_oth->rmap->N;
319   c_seq = (Mat_SeqAIJ*)C_oth->data;
320   cols = c_seq->j;
321   vals = c_seq->a;
322   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_oth->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
323   for (i=0; i<cm; i++) {
324     ncols = c_seq->i[i+1] - c_seq->i[i];
325     row = p->garray[i];
326     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
327     cols += ncols; vals += ncols;
328   }
329   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
330   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
331 
332   ptap->reuse = MAT_REUSE_MATRIX;
333 
334   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_oth->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
335   if (c_seq->i[C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_oth->rmap->n],nout);
336 
337   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated by A and P */
338   if (ptap->freestruct) {
339     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
340   }
341   PetscFunctionReturn(0);
342 }
343 
344 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat *C)
345 {
346   PetscErrorCode      ierr;
347   Mat_APMPI           *ptap;
348   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
349   MPI_Comm            comm;
350   PetscMPIInt         size,rank;
351   Mat                 Cmpi,P_loc,P_oth;
352   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
353   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
354   PetscInt            *lnk,i,k,pnz,row,nsend;
355   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
356   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
357   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
358   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
359   MPI_Request         *swaits,*rwaits;
360   MPI_Status          *sstatus,rstatus;
361   PetscLayout         rowmap;
362   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
363   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
364   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi,nout;
365   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
366   PetscScalar         *apv;
367   PetscTable          ta;
368   MatType             mtype;
369   const char          *prefix;
370 #if defined(PETSC_USE_INFO)
371   PetscReal           apfill;
372 #endif
373 
374   PetscFunctionBegin;
375   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
376   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
377   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
378 
379   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
380 
381   /* create symbolic parallel matrix Cmpi */
382   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
383   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
384   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
385 
386   /* create struct Mat_APMPI and attach it to C later */
387   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
388   ptap->reuse = MAT_INITIAL_MATRIX;
389   ptap->algType = 0;
390 
391   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
392   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);CHKERRQ(ierr);
393   /* get P_loc by taking all local rows of P */
394   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);CHKERRQ(ierr);
395 
396   ptap->P_loc = P_loc;
397   ptap->P_oth = P_oth;
398 
399   /* (0) compute Rd = Pd^T, Ro = Po^T  */
400   /* --------------------------------- */
401   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
402   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
403 
404   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
405   /* ----------------------------------------------------------------- */
406   p_loc  = (Mat_SeqAIJ*)P_loc->data;
407   if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;
408 
409   /* create and initialize a linked list */
410   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
411   MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
412   MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
413   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
414 
415   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
416 
417   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
418   if (ao) {
419     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
420   } else {
421     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
422   }
423   current_space = free_space;
424   nspacedouble  = 0;
425 
426   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
427   api[0] = 0;
428   for (i=0; i<am; i++) {
429     /* diagonal portion: Ad[i,:]*P */
430     ai = ad->i; pi = p_loc->i;
431     nzi = ai[i+1] - ai[i];
432     aj  = ad->j + ai[i];
433     for (j=0; j<nzi; j++) {
434       row  = aj[j];
435       pnz  = pi[row+1] - pi[row];
436       Jptr = p_loc->j + pi[row];
437       /* add non-zero cols of P into the sorted linked list lnk */
438       ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
439     }
440     /* off-diagonal portion: Ao[i,:]*P */
441     if (ao) {
442       ai = ao->i; pi = p_oth->i;
443       nzi = ai[i+1] - ai[i];
444       aj  = ao->j + ai[i];
445       for (j=0; j<nzi; j++) {
446         row  = aj[j];
447         pnz  = pi[row+1] - pi[row];
448         Jptr = p_oth->j + pi[row];
449         ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
450       }
451     }
452     apnz     = lnk[0];
453     api[i+1] = api[i] + apnz;
454 
455     /* if free space is not available, double the total space in the list */
456     if (current_space->local_remaining<apnz) {
457       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
458       nspacedouble++;
459     }
460 
461     /* Copy data into free space, then initialize lnk */
462     ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);
463 
464     current_space->array           += apnz;
465     current_space->local_used      += apnz;
466     current_space->local_remaining -= apnz;
467   }
468   /* Allocate space for apj and apv, initialize apj, and */
469   /* destroy list of free space and other temporary array(s) */
470   ierr = PetscCalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
471   ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
472   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
473 
474   /* Create AP_loc for reuse */
475   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
476   ierr = MatSeqAIJCompactOutExtraColumns_SeqAIJ(ptap->AP_loc, &ptap->ltog);CHKERRQ(ierr);
477 
478 #if defined(PETSC_USE_INFO)
479   if (ao) {
480     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
481   } else {
482     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
483   }
484   ptap->AP_loc->info.mallocs           = nspacedouble;
485   ptap->AP_loc->info.fill_ratio_given  = fill;
486   ptap->AP_loc->info.fill_ratio_needed = apfill;
487 
488   if (api[am]) {
489     ierr = PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
490     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
491   } else {
492     ierr = PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty \n");CHKERRQ(ierr);
493   }
494 #endif
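  /* Worked example of the fill diagnostics above (numbers are illustrative only): if on a rank
     nnz(Ad)+nnz(Ao) = 1.0e6 and nnz(P_loc) = 2.0e5, then fill = 2.0 reserves room for about
     2.4e6 entries of AP_loc up front; if AP_loc ends up with 3.0e6 nonzeros, apfill ~= 2.5 and
     nspacedouble > 0 reallocations were needed, so the PetscInfo() message suggests passing
     fill ~= 2.5 to the next MatPtAP() call to avoid them. */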
495 
496   /* (2-1) compute symbolic Co = Ro*AP_loc  */
497   /* ------------------------------------ */
498   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
499   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
500   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
501   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
502 
503   /* (3) send coj of C_oth to other processors  */
504   /* ------------------------------------------ */
505   /* determine row ownership */
506   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
507   rowmap->n  = pn;
508   rowmap->bs = 1;
509   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
510   owners = rowmap->range;
511 
512   /* determine the number of messages to send, their lengths */
513   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
514   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
515   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
516 
517   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
518   coi   = c_oth->i; coj = c_oth->j;
519   con   = ptap->C_oth->rmap->n;
520   proc  = 0;
521   ierr = ISLocalToGlobalMappingApply(ptap->ltog,coi[con],coj,coj);CHKERRQ(ierr);
522   for (i=0; i<con; i++) {
523     while (prmap[i] >= owners[proc+1]) proc++;
524     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
525     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
526   }
527 
528   len          = 0; /* max length of buf_si[], see (4) */
529   owners_co[0] = 0;
530   nsend        = 0;
531   for (proc=0; proc<size; proc++) {
532     owners_co[proc+1] = owners_co[proc] + len_si[proc];
533     if (len_s[proc]) {
534       nsend++;
535       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
536       len         += len_si[proc];
537     }
538   }
539 
540   /* determine the number and length of messages to receive for coi and coj  */
541   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
542   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
543 
544   /* post the Irecv and Isend of coj */
545   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
546   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
547   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
548   for (proc=0, k=0; proc<size; proc++) {
549     if (!len_s[proc]) continue;
550     i    = owners_co[proc];
551     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
552     k++;
553   }
554 
555   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
556   /* ---------------------------------------- */
557   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
558   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
559   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
560   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
561   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,c_loc->j);CHKERRQ(ierr);
562 
563   /* receives coj are complete */
564   for (i=0; i<nrecv; i++) {
565     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
566   }
567   ierr = PetscFree(rwaits);CHKERRQ(ierr);
568   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
569 
570   /* add received column indices into ta to update Crmax */
571   for (k=0; k<nrecv; k++) {/* k-th received message */
572     Jptr = buf_rj[k];
573     for (j=0; j<len_r[k]; j++) {
574       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
575     }
576   }
577   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
578   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
579 
580   /* (4) send and recv coi */
581   /*-----------------------*/
582   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
583   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
584   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
585   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
586   for (proc=0,k=0; proc<size; proc++) {
587     if (!len_s[proc]) continue;
588     /* form outgoing message for i-structure:
589          buf_si[0]:                 nrows to be sent
590                [1:nrows]:           row index (global)
591                [nrows+1:2*nrows+1]: i-structure index
592     */
593     /*-------------------------------------------*/
594     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
595     buf_si_i    = buf_si + nrows+1;
596     buf_si[0]   = nrows;
597     buf_si_i[0] = 0;
598     nrows       = 0;
599     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
600       nzi = coi[i+1] - coi[i];
601       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
602       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
603       nrows++;
604     }
605     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
606     k++;
607     buf_si += len_si[proc];
608   }
609   for (i=0; i<nrecv; i++) {
610     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
611   }
612   ierr = PetscFree(rwaits);CHKERRQ(ierr);
613   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
614 
615   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
616   ierr = PetscFree(len_ri);CHKERRQ(ierr);
617   ierr = PetscFree(swaits);CHKERRQ(ierr);
618   ierr = PetscFree(buf_s);CHKERRQ(ierr);
619 
620   /* (5) compute the local portion of Cmpi      */
621   /* ------------------------------------------ */
622   /* set initial free space to be Crmax, sufficient for holding the nonzeros in each row of Cmpi */
623   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
624   current_space = free_space;
625 
626   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
627   for (k=0; k<nrecv; k++) {
628     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
629     nrows       = *buf_ri_k[k];
630     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
631     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of the k-th received i-structure */
632   }
633 
634   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
635   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
636   for (i=0; i<pn; i++) {
637     /* add C_loc into Cmpi */
638     nzi  = c_loc->i[i+1] - c_loc->i[i];
639     Jptr = c_loc->j + c_loc->i[i];
640     ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
641 
642     /* add received col data into lnk */
643     for (k=0; k<nrecv; k++) { /* k-th received message */
644       if (i == *nextrow[k]) { /* i-th row */
645         nzi  = *(nextci[k]+1) - *nextci[k];
646         Jptr = buf_rj[k] + *nextci[k];
647         ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
648         nextrow[k]++; nextci[k]++;
649       }
650     }
651     nzi = lnk[0];
652 
653     /* copy data into free space, then initialize lnk */
654     ierr = PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);CHKERRQ(ierr);
655     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
656   }
657   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
658   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
659   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
660 
661   /* local sizes and preallocation */
662   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
663   if (P->cmap->bs > 0) {
664     ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
665     ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
666   }
667   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
668   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
669 
670   /* members in merge */
671   ierr = PetscFree(id_r);CHKERRQ(ierr);
672   ierr = PetscFree(len_r);CHKERRQ(ierr);
673   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
674   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
675   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
676   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
677   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
678 
679   /* attach the supporting struct to Cmpi for reuse */
680   c = (Mat_MPIAIJ*)Cmpi->data;
681   c->ap           = ptap;
682   ptap->duplicate = Cmpi->ops->duplicate;
683   ptap->destroy   = Cmpi->ops->destroy;
684   ptap->view      = Cmpi->ops->view;
685 
686   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
687   Cmpi->assembled        = PETSC_FALSE;
688   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
689   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
690   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
691   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
692   *C                     = Cmpi;
693 
694   nout = 0;
695   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_oth->i[ptap->C_oth->rmap->n],c_oth->j,&nout,c_oth->j);CHKERRQ(ierr);
696   if (c_oth->i[ptap->C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_oth->i[ptap->C_oth->rmap->n],nout);
697   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,&nout,c_loc->j);CHKERRQ(ierr);
698   if (c_loc->i[ptap->C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_loc->i[ptap->C_loc->rmap->n],nout);
699 
700   PetscFunctionReturn(0);
701 }
702 
703 PETSC_STATIC_INLINE PetscErrorCode MatPtAPSymbolicComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,PetscInt i,PetscHSetI dht,PetscHSetI oht)
704 {
705   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
706   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
707   PetscInt             *ai,nzi,j,*aj,row,col,*pi,*pj,pnz,nzpi,*p_othcols,k;
708   PetscInt             pcstart,pcend,column;
709   PetscErrorCode       ierr;
710 
711   PetscFunctionBegin;
712 
713   pcstart = P->cmap->rstart;
714   pcend   = P->cmap->rend;
715   /* diagonal portion: Ad[i,:]*P */
716   ai = ad->i;
717   nzi = ai[i+1] - ai[i];
718   aj  = ad->j + ai[i];
719   for (j=0; j<nzi; j++) {
720     row  = aj[j];
721     nzpi = pd->i[row+1] - pd->i[row];
722     pj  = pd->j + pd->i[row];
723     for (k=0; k<nzpi; k++) {
724       ierr = PetscHSetIAdd(dht,pj[k]+pcstart);CHKERRQ(ierr);
725     }
726   }
727   for (j=0; j<nzi; j++) {
728     row  = aj[j];
729     nzpi = po->i[row+1] - po->i[row];
730     pj  = po->j + po->i[row];
731     for (k=0; k<nzpi; k++) {
732       ierr = PetscHSetIAdd(oht,p->garray[pj[k]]);CHKERRQ(ierr);
733     }
734   }
735 
736   /* off diagonal part: Ao[i, :]*P_oth */
737   if (ao) {
738     ai = ao->i;
739     pi = p_oth->i;
740     nzi = ai[i+1] - ai[i];
741     aj  = ao->j + ai[i];
742     for (j=0; j<nzi; j++) {
743       row  = aj[j];
744       pnz  = pi[row+1] - pi[row];
745       p_othcols = p_oth->j + pi[row];
746       for (col=0; col<pnz; col++) {
747         column = p_othcols[col];
748         if (column>=pcstart && column<pcend) {
749           ierr = PetscHSetIAdd(dht,column);CHKERRQ(ierr);
750         } else {
751           ierr = PetscHSetIAdd(oht,column);CHKERRQ(ierr);
752         }
753       }
754     }
755   } /* end if (ao) */
756   PetscFunctionReturn(0);
757 }
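/*
  Sketch of the calling pattern for the helper above (this is roughly what the allatonce
  symbolic routine below does for each local row i of A; the names dht, oht, ndiag, noff are
  illustrative only):

    PetscHSetI dht,oht;
    PetscInt   ndiag,noff;
    ierr = PetscHSetICreate(&dht);CHKERRQ(ierr);
    ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
    ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,P_oth,i,dht,oht);CHKERRQ(ierr);
    ierr = PetscHSetIGetSize(dht,&ndiag);CHKERRQ(ierr);   // columns of AP(i,:) in P's local column range
    ierr = PetscHSetIGetSize(oht,&noff);CHKERRQ(ierr);    // columns of AP(i,:) owned by other ranks
    ierr = PetscHSetIDestroy(&dht);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
*/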
758 
759 PETSC_STATIC_INLINE PetscErrorCode MatPtAPNumericComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,PetscInt i,PetscHMapIV hmap)
760 {
761   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
762   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
763   PetscInt             *ai,nzi,j,*aj,row,col,*pi,pnz,*p_othcols,pcstart,*pj,k,nzpi;
764   PetscScalar          ra,*aa,*pa;
765   PetscErrorCode       ierr;
766 
767   PetscFunctionBegin;
768   pcstart = P->cmap->rstart;
769   /* diagonal portion: Ad[i,:]*P */
770   ai  = ad->i;
771   nzi = ai[i+1] - ai[i];
772   aj  = ad->j + ai[i];
773   aa  = ad->a + ai[i];
774 
775   for (j=0; j<nzi; j++) {
776     ra   = aa[j];
777     row  = aj[j];
778     nzpi = pd->i[row+1] - pd->i[row];
779     pj = pd->j + pd->i[row];
780     pa = pd->a + pd->i[row];
781     for (k=0; k<nzpi; k++) {
782       ierr = PetscHMapIVAddValue(hmap,pj[k]+pcstart,ra*pa[k]);CHKERRQ(ierr);
783     }
784   }
785   for (j=0; j<nzi; j++) {
786     ra   = aa[j];
787     row  = aj[j];
788     nzpi = po->i[row+1] - po->i[row];
789     pj = po->j + po->i[row];
790     pa = po->a + po->i[row];
791     for (k=0; k<nzpi; k++) {
792       ierr = PetscHMapIVAddValue(hmap,p->garray[pj[k]],ra*pa[k]);CHKERRQ(ierr);
793     }
794   }
795 
796 
797   /* off diagonal part: Ao[i, :]*P_oth */
798   if (ao) {
799     ai = ao->i;
800     pi = p_oth->i;
801     nzi = ai[i+1] - ai[i];
802     aj  = ao->j + ai[i];
803     aa  = ao->a + ai[i];
804     for (j=0; j<nzi; j++) {
805       row  = aj[j];
806       ra   = aa[j];
807       pnz  = pi[row+1] - pi[row];
808       p_othcols = p_oth->j + pi[row];
809       pa   = p_oth->a + pi[row];
810       for (col=0; col<pnz; col++) {
811         ierr = PetscHMapIVAddValue(hmap,p_othcols[col],ra*pa[col]);CHKERRQ(ierr);
812       }
813     }
814   } /* end if (ao) */
815   PetscFunctionReturn(0);
816 }
817 
818 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,Mat C)
819 {
820   PetscErrorCode    ierr;
821   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
822   Mat_SeqAIJ        *cd,*co,*po,*pd;
823   Mat_APMPI         *ptap = c->ap;
824   PetscHMapIV       hmap;
825   PetscInt          i,j,jj,kk,nzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,ccstart,ccend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,*dcc,*occ,loc;
826   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
827   MPI_Comm          comm;
828 
829   PetscFunctionBegin;
830   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
831   if (!ptap) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
832 
833   ierr = MatZeroEntries(C);CHKERRQ(ierr);
834 
835   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
836   /*-----------------------------------------------------*/
837   if (ptap->reuse == MAT_REUSE_MATRIX) {
838     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
839     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
840   }
841 
842   po = (Mat_SeqAIJ*) p->B->data;
843   pd = (Mat_SeqAIJ*) p->A->data;
844 
845   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
846 
847   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
848   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
849   cmaxr = 0;
850   for (i=0; i<pon; i++) {
851     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
852   }
853   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
854   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
855   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
856   for (i=0; i<am && pon; i++) {
857     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
858     nzi = po->i[i+1] - po->i[i];
859     if (!nzi) continue;
860     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
861     voff = 0;
862     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
863     if (!voff) continue;
864     /*ierr = PetscMemzero(c_rmtc,sizeof(PetscInt)*pon);CHKERRQ(ierr);*/
865 
866     /* Form C(ii, :) */
867     poj = po->j + po->i[i];
868     poa = po->a + po->i[i];
869     for (j=0; j<nzi; j++) {
870       c_rmtjj = c_rmtj + ptap->c_rmti[poj[j]];
871       c_rmtaa = c_rmta + ptap->c_rmti[poj[j]];
872       for (jj=0; jj<voff; jj++) {
873         apvaluestmp[jj] = apvalues[jj]*poa[j];
874         /*If the row is empty */
875         if (!c_rmtc[poj[j]]) {
876           c_rmtjj[jj] = apindices[jj];
877           c_rmtaa[jj] = apvaluestmp[jj];
878           c_rmtc[poj[j]]++;
879         } else {
880           ierr = PetscFindInt(apindices[jj],c_rmtc[poj[j]],c_rmtjj,&loc);CHKERRQ(ierr);
881           if (loc>=0){ /* hit */
882             c_rmtaa[loc] += apvaluestmp[jj];
883           } else { /* new element */
884             loc = -(loc+1);
885             /* Move data backward */
886             for (kk=c_rmtc[poj[j]]; kk>loc; kk--) {
887               c_rmtjj[kk] = c_rmtjj[kk-1];
888               c_rmtaa[kk] = c_rmtaa[kk-1];
889             }/* End kk */
890             c_rmtjj[loc] = apindices[jj];
891             c_rmtaa[loc] = apvaluestmp[jj];
892             c_rmtc[poj[j]]++;
893           }
894         }
895       } /* End jj */
896     } /* End j */
897   } /* End i */
898 
899   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
900 
901   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
902   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
903 
904   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
905   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
906   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
907   cd = (Mat_SeqAIJ*)(c->A)->data;
908   co = (Mat_SeqAIJ*)(c->B)->data;
909 
910   cmaxr = 0;
911   for (i=0; i<pn; i++) {
912     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
913   }
914   ierr = PetscCalloc5(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pn,&dcc,pn,&occ);CHKERRQ(ierr);
915   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
916   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
917   for (i=0; i<am && pn; i++) {
918     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
919     nzi = pd->i[i+1] - pd->i[i];
920     if (!nzi) continue;
921     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
922     voff = 0;
923     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
924     if (!voff) continue;
925     /* Form C(ii, :) */
926     pdj = pd->j + pd->i[i];
927     pda = pd->a + pd->i[i];
928     for (j=0; j<nzi; j++) {
929       row = pcstart + pdj[j];
930       for (jj=0; jj<voff; jj++) {
931         apvaluestmp[jj] = apvalues[jj]*pda[j];
932       }
933       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
934     }
935   }
936 
937   ierr = MatGetOwnershipRangeColumn(C,&ccstart,&ccend);CHKERRQ(ierr);
938   ierr = PetscFree5(apindices,apvalues,apvaluestmp,dcc,occ);CHKERRQ(ierr);
939   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
940   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr); /* op must match the one passed to PetscSFReduceBegin() */
941   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
942   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
943 
944   /* Add contributions from remote */
945   for (i = 0; i < pn; i++) {
946     row = i + pcstart;
947     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
948   }
949   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
950 
951   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
952   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
953 
954   ptap->reuse = MAT_REUSE_MATRIX;
955 
956   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated by A and P */
957   if (ptap->freestruct) {
958     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
959   }
960   PetscFunctionReturn(0);
961 }
962 
963 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,Mat C)
964 {
965   PetscErrorCode    ierr;
966   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
967   Mat_SeqAIJ        *cd,*co,*po,*pd;
968   Mat_APMPI         *ptap = c->ap;
969   PetscHMapIV       hmap;
970   PetscInt          i,j,jj,kk,nzi,dnzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,loc;
971   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
972   MPI_Comm          comm;
973 
974   PetscFunctionBegin;
975   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
976   if (!ptap) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
977 
978   ierr = MatZeroEntries(C);CHKERRQ(ierr);
979 
980   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
981   /*-----------------------------------------------------*/
982   if (ptap->reuse == MAT_REUSE_MATRIX) {
983     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
984     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
985   }
986 
987   po = (Mat_SeqAIJ*) p->B->data;
988   pd = (Mat_SeqAIJ*) p->A->data;
989 
990   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
991   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
992 
993   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
994   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
995   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
996   cmaxr = 0;
997   for (i=0; i<pon; i++) {
998     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
999   }
1000   cd = (Mat_SeqAIJ*)(c->A)->data;
1001   co = (Mat_SeqAIJ*)(c->B)->data;
1002   for (i=0; i<pn; i++) {
1003     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
1004   }
1005   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
1006   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
1007   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
1008   for (i=0; i<am && (pon || pn); i++) {
1009     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
1010     nzi  = po->i[i+1] - po->i[i];
1011     dnzi = pd->i[i+1] - pd->i[i];
1012     if (!nzi && !dnzi) continue;
1013     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
1014     voff = 0;
1015     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
1016     if (!voff) continue;
1017 
1018     /* Form remote C(ii, :) */
1019     poj = po->j + po->i[i];
1020     poa = po->a + po->i[i];
1021     for (j=0; j<nzi; j++) {
1022       c_rmtjj = c_rmtj + ptap->c_rmti[poj[j]];
1023       c_rmtaa = c_rmta + ptap->c_rmti[poj[j]];
1024       for (jj=0; jj<voff; jj++) {
1025         apvaluestmp[jj] = apvalues[jj]*poa[j];
1026         /*If the row is empty */
1027         if (!c_rmtc[poj[j]]) {
1028           c_rmtjj[jj] = apindices[jj];
1029           c_rmtaa[jj] = apvaluestmp[jj];
1030           c_rmtc[poj[j]]++;
1031         } else {
1032           ierr = PetscFindInt(apindices[jj],c_rmtc[poj[j]],c_rmtjj,&loc);CHKERRQ(ierr);
1033           if (loc>=0){ /* hit */
1034             c_rmtaa[loc] += apvaluestmp[jj];
1035           } else { /* new element */
1036             loc = -(loc+1);
1037             /* Move data backward */
1038             for (kk=c_rmtc[poj[j]]; kk>loc; kk--) {
1039               c_rmtjj[kk] = c_rmtjj[kk-1];
1040               c_rmtaa[kk] = c_rmtaa[kk-1];
1041             }/* End kk */
1042             c_rmtjj[loc] = apindices[jj];
1043             c_rmtaa[loc] = apvaluestmp[jj];
1044             c_rmtc[poj[j]]++;
1045           }
1046         }
1047       } /* End jj */
1048     } /* End j */
1049 
1050     /* Form local C(ii, :) */
1051     pdj = pd->j + pd->i[i];
1052     pda = pd->a + pd->i[i];
1053     for (j=0; j<dnzi; j++) {
1054       row = pcstart + pdj[j];
1055       for (jj=0; jj<voff; jj++) {
1056         apvaluestmp[jj] = apvalues[jj]*pda[j];
1057       }/* End jj */
1058       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
1059     }/* End j */
1060   } /* End i */
1061 
1062   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
1063   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
1064   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
1065 
1066   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1067   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1068   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr); /* op must match the one passed to PetscSFReduceBegin() */
1069   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1070   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
1071 
1072   /* Add contributions from remote */
1073   for (i = 0; i < pn; i++) {
1074     row = i + pcstart;
1075     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
1076   }
1077   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
1078 
1079   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1080   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1081 
1082   ptap->reuse = MAT_REUSE_MATRIX;
1083 
1084   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated by A and P */
1085   if (ptap->freestruct) {
1086     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
1087   }
1088   PetscFunctionReturn(0);
1089 }
1090 
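/*
  Outline of the communication set up below (a sketch in terms of the variables used in this
  file; it introduces no new API):
    - c_rmti[0..pon] are CSR-style offsets of the rows of C that this rank computes on behalf
      of other ranks (one row per off-diagonal column of P, i.e. per entry of p->garray);
    - a temporary PetscSF with pon leaves and pn roots is used with PetscSFComputeDegree,
      PetscSFGather and PetscSFScatter to exchange the per-row counts and derive c_othi[0..pn],
      the CSR-style offsets of the remote contributions each locally owned row of C receives;
    - ptap->sf then maps every slot of the send buffers (c_rmtj/c_rmta) to a slot of the
      receive buffers (c_othj/c_otha), so PetscSFReduce moves the column indices here and the
      numerical values in the numeric phase.
*/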
1091 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,PetscReal fill,Mat *C)
1092 {
1093   Mat_APMPI           *ptap;
1094   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1095   MPI_Comm            comm;
1096   Mat                 Cmpi;
1097   Mat_SeqAIJ          *pd,*po;
1098   MatType             mtype;
1099   PetscSF             sf;
1100   PetscSFNode         *iremote;
1101   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1102   const PetscInt      *rootdegrees;
1103   PetscHSetI          ht,oht,*hta,*hto;
1104   PetscInt            pn,pon,*c_rmtc,i,j,nzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1105   PetscInt            owner,lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1106   PetscInt            nalg=2,alg=0;
1107   PetscBool           flg;
1108   const char          *algTypes[2] = {"overlapping","merged"};
1109   PetscErrorCode      ierr;
1110 
1111   PetscFunctionBegin;
1112   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1113 
1114   /* Create symbolic parallel matrix Cmpi */
1115   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1116   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1117   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1118   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1119   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1120 
1121   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1122   ptap->reuse = MAT_INITIAL_MATRIX;
1123   ptap->algType = 2;
1124 
1125   /* Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1126   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1127 
1128   po = (Mat_SeqAIJ*)p->B->data;
1129   pd = (Mat_SeqAIJ*)p->A->data;
1130 
1131   /* This equals the number of off-diagonal columns of P */
1132   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1133   /* offsets */
1134   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1135   /* The number of columns we will send to remote ranks */
1136   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1137   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1138   for (i=0; i<pon; i++) {
1139     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1140   }
1141   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1142   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1143   /* Create hash table to merge all columns for C(i, :) */
1144   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1145 
1146   ptap->c_rmti[0] = 0;
1147   /* 2) Pass 1: calculate the size of C_rmt (a matrix that needs to be sent to other processors) */
1148   for (i=0; i<am && pon; i++) {
1149     /* Form one row of AP */
1150     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1151     /* If the off diag is empty, we should not do any calculation */
1152     nzi = po->i[i+1] - po->i[i];
1153     if (!nzi) continue;
1154 
1155     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,ht);CHKERRQ(ierr);
1156     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1157     /* If AP is empty, just continue */
1158     if (!htsize) continue;
1159     /* Form C(ii, :) */
1160     poj = po->j + po->i[i];
1161     for (j=0; j<nzi; j++) {
1162       ierr = PetscHSetIUpdate(hta[poj[j]],ht);CHKERRQ(ierr);
1163     }
1164   }
1165 
1166   for (i=0; i<pon; i++) {
1167     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1168     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1169     c_rmtc[i] = htsize;
1170   }
1171 
1172   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1173 
1174   for (i=0; i<pon; i++) {
1175     off = 0;
1176     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1177     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1178   }
1179   ierr = PetscFree(hta);CHKERRQ(ierr);
1180 
1181   ierr = PetscCalloc1(pon,&iremote);CHKERRQ(ierr);
1182   for (i=0; i<pon; i++) {
1183     owner = 0; lidx = 0;
1184     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1185     iremote[i].index = lidx;
1186     iremote[i].rank  = owner;
1187   }
1188 
1189   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1190   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1191   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1192   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1193   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1194   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1195   /* How many neighbors have contributions to my rows? */
1196   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1197   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1198   rootspacesize = 0;
1199   for (i = 0; i < pn; i++) {
1200     rootspacesize += rootdegrees[i];
1201   }
1202   ierr = PetscCalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1203   ierr = PetscCalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1204   /* Get information from the leaves:
1205    * the number of columns other ranks contribute to my rows
1206    */
1207   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1208   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1209   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1210   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1211   /* For each locally owned row, accumulate the number of columns received from other ranks */
1212   ptap->c_othi[0] = 0;
1213   rootspacesize = 0;
1214   rootspaceoffsets[0] = 0;
1215   for (i = 0; i < pn; i++) {
1216     rcvncols = 0;
1217     for (j = 0; j<rootdegrees[i]; j++) {
1218       rcvncols += rootspace[rootspacesize];
1219       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1220       rootspacesize++;
1221     }
1222     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1223   }
1224   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1225 
1226   ierr = PetscCalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1227   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1228   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1229   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1230   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1231 
1232   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1233   nleaves = 0;
1234   for (i = 0; i<pon; i++) {
1235     owner = 0; lidx = 0;
1236     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1237     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1238     for (j=0; j<sendncols; j++) {
1239       iremote[nleaves].rank = owner;
1240       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1241     }
1242   }
1243   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1244   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1245 
1246   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1247   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1248   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1249   /* One to one map */
1250   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1251 
1252   ierr = PetscCalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1253   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1254   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1255   ierr = PetscMalloc2(pn,&hta,pn,&hto);CHKERRQ(ierr);
1256   for (i=0; i<pn; i++) {
1257     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1258     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1259   }
1260   /* Work on local part */
1261   /* 4) Pass 1: Estimate memory for C_loc */
1262   for (i=0; i<am && pn; i++) {
1263     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1264     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1265     nzi = pd->i[i+1] - pd->i[i];
1266     if (!nzi) continue;
1267 
1268     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,oht);CHKERRQ(ierr);
1269     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1270     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1271     if (!(htsize+htosize)) continue;
1272     /* Form C(ii, :) */
1273     pdj = pd->j + pd->i[i];
1274     for (j=0; j<nzi; j++) {
1275       ierr = PetscHSetIUpdate(hta[pdj[j]],ht);CHKERRQ(ierr);
1276       ierr = PetscHSetIUpdate(hto[pdj[j]],oht);CHKERRQ(ierr);
1277     }
1278   }
1279 
1280   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1281   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1282 
1283   /* Get remote data */
1284   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1285   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1286 
1287   for (i = 0; i < pn; i++) {
1288     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1289     rdj = c_othj + ptap->c_othi[i];
1290     for (j = 0; j < nzi; j++) {
1291       col =  rdj[j];
1292       /* diag part */
1293       if (col>=pcstart && col<pcend) {
1294         ierr = PetscHSetIAdd(hta[i],col);CHKERRQ(ierr);
1295       } else { /* off diag */
1296         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1297       }
1298     }
1299     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1300     dnz[i] = htsize;
1301     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1302     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1303     onz[i] = htsize;
1304     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1305   }
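  /* dnz[i] and onz[i] now hold the number of distinct columns of row i that fall inside
   * the diagonal block [pcstart,pcend) and outside it, respectively, so the preallocation
   * below should not trigger additional mallocs during the numeric phase */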
1306 
1307   ierr = PetscFree2(hta,hto);CHKERRQ(ierr);
1308   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1309 
1310   /* local sizes and preallocation */
1311   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1312   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
1313   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1314   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1315 
1316   /* attach the supporting struct to Cmpi for reuse */
1317   c = (Mat_MPIAIJ*)Cmpi->data;
1318   c->ap           = ptap;
1319   ptap->duplicate = Cmpi->ops->duplicate;
1320   ptap->destroy   = Cmpi->ops->destroy;
1321   ptap->view      = Cmpi->ops->view;
1322 
1323   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1324   Cmpi->assembled        = PETSC_FALSE;
1325   /* pick an algorithm */
1326   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1327   alg = 0;
1328   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1329   ierr = PetscOptionsEnd();CHKERRQ(ierr);
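  /* Illustrative run-time usage: the numeric kernel can be switched with
   *   -matptap_allatonce_via <type>
   * where <type> is one of the entries of algTypes; case 0 below selects the allatonce
   * kernel, case 1 the merged allatonce kernel */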
1330   switch (alg) {
1331     case 0:
1332       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1333       break;
1334     case 1:
1335       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1336       break;
1337     default:
1338       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1339   }
1340   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1341   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1342   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1343   *C                     = Cmpi;
1344   PetscFunctionReturn(0);
1345 }
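/* A minimal sketch (assuming the standard MatPtAP() interface) of how the symbolic and
 * numeric phases implemented in this file are typically driven by a user:
 *
 *   Mat A,P,C;
 *   ...                                             // assemble MPIAIJ matrices A and P
 *   ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);  // symbolic + numeric
 *   ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);    // reuse the attached Mat_APMPI data
 *
 * With MAT_REUSE_MATRIX only the numeric kernel chosen by the symbolic phase is rerun. */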
1346 
1347 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,PetscReal fill,Mat *C)
1348 {
1349   Mat_APMPI           *ptap;
1350   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1351   MPI_Comm            comm;
1352   Mat                 Cmpi;
1353   Mat_SeqAIJ          *pd,*po;
1354   MatType             mtype;
1355   PetscSF             sf;
1356   PetscSFNode         *iremote;
1357   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1358   const PetscInt      *rootdegrees;
1359   PetscHSetI          ht,oht,*hta,*hto,*htd;
1360   PetscInt            pn,pon,*c_rmtc,i,j,nzi,dnzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1361   PetscInt            owner,lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1362   PetscInt            nalg=2,alg=0;
1363   PetscBool           flg;
1364   const char          *algTypes[2] = {"merged","overlapping"};
1365   PetscErrorCode      ierr;
1366 
1367   PetscFunctionBegin;
1368   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1369 
1370   /* Create symbolic parallel matrix Cmpi */
1371   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1372   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1373   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1374   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1375   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1376 
1377   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1378   ptap->reuse = MAT_INITIAL_MATRIX;
1379   ptap->algType = 3;
1380 
1381   /* 0) Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1382   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1383 
1384   po = (Mat_SeqAIJ*)p->B->data;
1385   pd = (Mat_SeqAIJ*)p->A->data;
1386 
1387   /* This equals the number of off-diagonal columns of P */
1388   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1389   /* offsets */
1390   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1391   /* The number of columns we will send to remote ranks */
1392   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1393   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1394   for (i=0; i<pon; i++) {
1395     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1396   }
1397   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1398   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1399   /* Create hash table to merge all columns for C(i, :) */
1400   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1401   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1402   ierr = PetscMalloc2(pn,&htd,pn,&hto);CHKERRQ(ierr);
1403   for (i=0; i<pn; i++) {
1404     ierr = PetscHSetICreate(&htd[i]);CHKERRQ(ierr);
1405     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1406   }
1407   ptap->c_rmti[0] = 0;
1408   /* 2) Pass 1: calculate the size of C_rmt (the matrix that needs to be sent to other processors) */
1409   for (i=0; i<am && (pon || pn); i++) {
1410     /* Form one row of AP */
1411     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1412     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1413     /* If both the diagonal and off-diagonal parts of this row are empty, skip it */
1414     nzi = po->i[i+1] - po->i[i];
1415     dnzi = pd->i[i+1] - pd->i[i];
1416     if (!nzi && !dnzi) continue;
1417 
1418     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,oht);CHKERRQ(ierr);
1419     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1420     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1421     /* If AP is empty, just continue */
1422     if (!(htsize+htosize)) continue;
1423 
1424     /* Form remote C(ii, :) */
1425     poj = po->j + po->i[i];
1426     for (j=0; j<nzi; j++) {
1427       ierr = PetscHSetIUpdate(hta[poj[j]],ht);CHKERRQ(ierr);
1428       ierr = PetscHSetIUpdate(hta[poj[j]],oht);CHKERRQ(ierr);
1429     }
1430 
1431     /* Form local C(ii, :) */
1432     pdj = pd->j + pd->i[i];
1433     for (j=0; j<dnzi; j++) {
1434       ierr = PetscHSetIUpdate(htd[pdj[j]],ht);CHKERRQ(ierr);
1435       ierr = PetscHSetIUpdate(hto[pdj[j]],oht);CHKERRQ(ierr);
1436     }
1437   }
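  /* At this point hta[] holds, for every off-diagonal column of P (i.e. every row of C
   * owned by another process), all column indices this rank contributes to that row,
   * while htd[]/hto[] hold the diagonal/off-diagonal columns of the locally owned rows */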
1438 
1439   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1440   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1441 
1442   for (i=0; i<pon; i++) {
1443     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1444     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1445     c_rmtc[i] = htsize;
1446   }
1447 
1448   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1449 
1450   for (i=0; i<pon; i++) {
1451     off = 0;
1452     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1453     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1454   }
1455   ierr = PetscFree(hta);CHKERRQ(ierr);
1456 
1457   ierr = PetscCalloc1(pon,&iremote);CHKERRQ(ierr);
1458   for (i=0; i<pon; i++) {
1459     owner = 0; lidx = 0;
1460     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1461     iremote[i].index = lidx;
1462     iremote[i].rank  = owner;
1463   }
1464 
1465   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1466   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1467   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1468   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1469   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1470   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1471   /* How many neighbors have contributions to my rows? */
1472   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1473   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1474   rootspacesize = 0;
1475   for (i = 0; i < pn; i++) {
1476     rootspacesize += rootdegrees[i];
1477   }
1478   ierr = PetscCalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1479   ierr = PetscCalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1480   /* Get information from the leaves:
1481    * the number of columns other processes contribute to my rows
1482    */
1483   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1484   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1485   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1486   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1487   /* Accumulate the received per-neighbor column counts into the row offsets ptap->c_othi */
1488   ptap->c_othi[0] = 0;
1489   rootspacesize = 0;
1490   rootspaceoffsets[0] = 0;
1491   for (i = 0; i < pn; i++) {
1492     rcvncols = 0;
1493     for (j = 0; j<rootdegrees[i]; j++) {
1494       rcvncols += rootspace[rootspacesize];
1495       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1496       rootspacesize++;
1497     }
1498     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1499   }
1500   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1501 
1502   ierr = PetscCalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1503   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1504   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1505   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1506   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1507 
1508   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1509   nleaves = 0;
1510   for (i = 0; i<pon; i++) {
1511     owner = 0; lidx = 0;
1512     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1513     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1514     for (j=0; j<sendncols; j++) {
1515       iremote[nleaves].rank = owner;
1516       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1517     }
1518   }
1519   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1520   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1521 
1522   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1523   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1524   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1525   /* One-to-one map: each leaf writes to a distinct root entry, so MPIU_REPLACE is safe */
1526   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1527   /* Get remote data */
1528   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1529   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1530   ierr = PetscCalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1531   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1532 
1533   for (i = 0; i < pn; i++) {
1534     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1535     rdj = c_othj + ptap->c_othi[i];
1536     for (j = 0; j < nzi; j++) {
1537       col =  rdj[j];
1538       /* diag part */
1539       if (col>=pcstart && col<pcend) {
1540         ierr = PetscHSetIAdd(htd[i],col);CHKERRQ(ierr);
1541       } else { /* off diag */
1542         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1543       }
1544     }
1545     ierr = PetscHSetIGetSize(htd[i],&htsize);CHKERRQ(ierr);
1546     dnz[i] = htsize;
1547     ierr = PetscHSetIDestroy(&htd[i]);CHKERRQ(ierr);
1548     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1549     onz[i] = htsize;
1550     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1551   }
1552 
1553   ierr = PetscFree2(htd,hto);CHKERRQ(ierr);
1554   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1555 
1556   /* local sizes and preallocation */
1557   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1558   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
1559   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1560   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1561 
1562   /* attach the supporting struct to Cmpi for reuse */
1563   c = (Mat_MPIAIJ*)Cmpi->data;
1564   c->ap           = ptap;
1565   ptap->duplicate = Cmpi->ops->duplicate;
1566   ptap->destroy   = Cmpi->ops->destroy;
1567   ptap->view      = Cmpi->ops->view;
1568 
1569   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1570   Cmpi->assembled        = PETSC_FALSE;
1571   /* pick an algorithm */
1572   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1573   alg = 0;
1574   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1575   ierr = PetscOptionsEnd();CHKERRQ(ierr);
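  /* Illustrative run-time usage: -matptap_allatonce_via merged keeps the merged kernel
   * selected by this symbolic phase (case 0), while -matptap_allatonce_via overlapping
   * switches the numeric phase to the plain allatonce kernel */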
1576   switch (alg) {
1577     case 0:
1578       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1579       break;
1580     case 1:
1581       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1582       break;
1583     default:
1584       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1585   }
1586   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1587   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1588   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1589   *C                     = Cmpi;
1590   PetscFunctionReturn(0);
1591 }
1592 
1593 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat *C)
1594 {
1595   PetscErrorCode      ierr;
1596   Mat_APMPI           *ptap;
1597   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
1598   MPI_Comm            comm;
1599   PetscMPIInt         size,rank;
1600   Mat                 Cmpi;
1601   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
1602   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
1603   PetscInt            *lnk,i,k,pnz,row,nsend;
1604   PetscBT             lnkbt;
1605   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
1606   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
1607   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
1608   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1609   MPI_Request         *swaits,*rwaits;
1610   MPI_Status          *sstatus,rstatus;
1611   PetscLayout         rowmap;
1612   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
1613   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
1614   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
1615   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
1616   PetscScalar         *apv;
1617   PetscTable          ta;
1618   MatType             mtype;
1619   const char          *prefix;
1620 #if defined(PETSC_USE_INFO)
1621   PetscReal           apfill;
1622 #endif
1623 
1624   PetscFunctionBegin;
1625   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1626   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1627   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1628 
1629   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
1630 
1631   /* create symbolic parallel matrix Cmpi */
1632   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1633   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1634   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1635 
1636   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
1637   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;
1638 
1639   /* create struct Mat_APMPI and attach it to C later */
1640   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1641   ptap->reuse = MAT_INITIAL_MATRIX;
1642   ptap->algType = 1;
1643 
1644   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1645   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1646   /* get P_loc by taking all local rows of P */
1647   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1648 
1649   /* (0) compute Rd = Pd^T, Ro = Po^T  */
1650   /* --------------------------------- */
1651   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1652   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1653 
1654   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
1655   /* ----------------------------------------------------------------- */
1656   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1657   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1658 
1659   /* create and initialize a linked list */
1660   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
1661   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
1662   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
1663   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
1664   /* printf("[%d] est %d, Crmax %d; pN %d\n",rank,5*(p_loc->rmax+p_oth->rmax + (PetscInt)(1.e-2*pN)),Crmax,pN); */
1665 
1666   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
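  /* lnk is a condensed linked list over the pN global columns; it merges the sorted
   * column indices of the rows of P touched by one row of A, producing the sparsity
   * pattern of the corresponding row of AP */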
1667 
1668   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
1669   if (ao) {
1670     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
1671   } else {
1672     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
1673   }
1674   current_space = free_space;
1675   nspacedouble  = 0;
1676 
1677   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
1678   api[0] = 0;
1679   for (i=0; i<am; i++) {
1680     /* diagonal portion: Ad[i,:]*P */
1681     ai = ad->i; pi = p_loc->i;
1682     nzi = ai[i+1] - ai[i];
1683     aj  = ad->j + ai[i];
1684     for (j=0; j<nzi; j++) {
1685       row  = aj[j];
1686       pnz  = pi[row+1] - pi[row];
1687       Jptr = p_loc->j + pi[row];
1688       /* add non-zero cols of P into the sorted linked list lnk */
1689       ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1690     }
1691     /* off-diagonal portion: Ao[i,:]*P */
1692     if (ao) {
1693       ai = ao->i; pi = p_oth->i;
1694       nzi = ai[i+1] - ai[i];
1695       aj  = ao->j + ai[i];
1696       for (j=0; j<nzi; j++) {
1697         row  = aj[j];
1698         pnz  = pi[row+1] - pi[row];
1699         Jptr = p_oth->j + pi[row];
1700         ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1701       }
1702     }
1703     apnz     = lnk[0];
1704     api[i+1] = api[i] + apnz;
1705     if (ap_rmax < apnz) ap_rmax = apnz;
1706 
1707     /* if free space is not available, double the total space in the list */
1708     if (current_space->local_remaining<apnz) {
1709       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
1710       nspacedouble++;
1711     }
1712 
1713     /* Copy data into free space, then initialize lnk */
1714     ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1715 
1716     current_space->array           += apnz;
1717     current_space->local_used      += apnz;
1718     current_space->local_remaining -= apnz;
1719   }
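  /* api[] is now the CSR row-offset array of AP_loc; each time a row did not fit into the
   * current chunk the free space was doubled, which is counted in nspacedouble and
   * reported through PetscInfo() below */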
1720   /* Allocate space for apj and apv, initialize apj, and */
1721   /* destroy list of free space and other temporary array(s) */
1722   ierr   = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
1723   ierr   = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
1724   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1725 
1726   /* Create AP_loc for reuse */
1727   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
1728 
1729 #if defined(PETSC_USE_INFO)
1730   if (ao) {
1731     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
1732   } else {
1733     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
1734   }
1735   ptap->AP_loc->info.mallocs           = nspacedouble;
1736   ptap->AP_loc->info.fill_ratio_given  = fill;
1737   ptap->AP_loc->info.fill_ratio_needed = apfill;
1738 
1739   if (api[am]) {
1740     ierr = PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
1741     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,B,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
1742   } else {
1743     ierr = PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty\n");CHKERRQ(ierr);
1744   }
1745 #endif
1746 
1747   /* (2-1) compute symbolic Co = Ro*AP_loc  */
1748   /* ------------------------------------ */
1749   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1750   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
1751   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
1752   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
1753 
1754   /* (3) send coj of C_oth to other processors  */
1755   /* ------------------------------------------ */
1756   /* determine row ownership */
1757   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
1758   rowmap->n  = pn;
1759   rowmap->bs = 1;
1760   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
1761   owners = rowmap->range;
1762 
1763   /* determine the number of messages to send, their lengths */
1764   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
1765   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1766   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1767 
1768   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1769   coi   = c_oth->i; coj = c_oth->j;
1770   con   = ptap->C_oth->rmap->n;
1771   proc  = 0;
1772   for (i=0; i<con; i++) {
1773     while (prmap[i] >= owners[proc+1]) proc++;
1774     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
1775     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1776   }
1777 
1778   len          = 0; /* max length of buf_si[], see (4) */
1779   owners_co[0] = 0;
1780   nsend        = 0;
1781   for (proc=0; proc<size; proc++) {
1782     owners_co[proc+1] = owners_co[proc] + len_si[proc];
1783     if (len_s[proc]) {
1784       nsend++;
1785       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1786       len         += len_si[proc];
1787     }
1788   }
1789 
1790   /* determine the number and length of messages to receive for coi and coj  */
1791   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
1792   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
1793 
1794   /* post the Irecv and Isend of coj */
1795   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
1796   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
1797   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
1798   for (proc=0, k=0; proc<size; proc++) {
1799     if (!len_s[proc]) continue;
1800     i    = owners_co[proc];
1801     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
1802     k++;
1803   }
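  /* The symbolic product Rd*AP_loc below is computed while the coj messages are still in
   * flight; the matching MPI_Waitany()/MPI_Waitall() calls follow it */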
1804 
1805   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
1806   /* ---------------------------------------- */
1807   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
1808   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
1809   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
1810   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
1811 
1812   /* receives coj are complete */
1813   for (i=0; i<nrecv; i++) {
1814     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1815   }
1816   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1817   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1818 
1819   /* add received column indices into ta to update Crmax */
1820   for (k=0; k<nrecv; k++) {/* k-th received message */
1821     Jptr = buf_rj[k];
1822     for (j=0; j<len_r[k]; j++) {
1823       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
1824     }
1825   }
1826   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
1827   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
1828 
1829   /* (4) send and recv coi */
1830   /*-----------------------*/
1831   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
1832   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
1833   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
1834   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
1835   for (proc=0,k=0; proc<size; proc++) {
1836     if (!len_s[proc]) continue;
1837     /* form outgoing message for i-structure:
1838          buf_si[0]:                 nrows to be sent
1839                [1:nrows]:           row index (global)
1840                [nrows+1:2*nrows+1]: i-structure index
1841     */
1842     /*-------------------------------------------*/
1843     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1844     buf_si_i    = buf_si + nrows+1;
1845     buf_si[0]   = nrows;
1846     buf_si_i[0] = 0;
1847     nrows       = 0;
1848     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1849       nzi = coi[i+1] - coi[i];
1850       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
1851       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
1852       nrows++;
1853     }
1854     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
1855     k++;
1856     buf_si += len_si[proc];
1857   }
1858   for (i=0; i<nrecv; i++) {
1859     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1860   }
1861   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1862   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1863 
1864   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
1865   ierr = PetscFree(len_ri);CHKERRQ(ierr);
1866   ierr = PetscFree(swaits);CHKERRQ(ierr);
1867   ierr = PetscFree(buf_s);CHKERRQ(ierr);
1868 
1869   /* (5) compute the local portion of Cmpi      */
1870   /* ------------------------------------------ */
1871   /* set initial free space to be Crmax, sufficient for holding the nonzeros in each row of Cmpi */
1872   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
1873   current_space = free_space;
1874 
1875   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
1876   for (k=0; k<nrecv; k++) {
1877     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1878     nrows       = *buf_ri_k[k];
1879     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
1880     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
1881   }
1882 
1883   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
1884   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
1885   for (i=0; i<pn; i++) {
1886     /* add C_loc into Cmpi */
1887     nzi  = c_loc->i[i+1] - c_loc->i[i];
1888     Jptr = c_loc->j + c_loc->i[i];
1889     ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1890 
1891     /* add received col data into lnk */
1892     for (k=0; k<nrecv; k++) { /* k-th received message */
1893       if (i == *nextrow[k]) { /* i-th row */
1894         nzi  = *(nextci[k]+1) - *nextci[k];
1895         Jptr = buf_rj[k] + *nextci[k];
1896         ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1897         nextrow[k]++; nextci[k]++;
1898       }
1899     }
1900     nzi = lnk[0];
1901 
1902     /* copy data into free space, then initialize lnk */
1903     ierr = PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1904     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
1905   }
1906   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
1907   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1908   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
1909 
1910   /* local sizes and preallocation */
1911   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1912   if (P->cmap->bs > 0) {
1913     ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
1914     ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
1915   }
1916   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1917   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1918 
1919   /* free the temporary communication buffers and the row layout */
1920   ierr = PetscFree(id_r);CHKERRQ(ierr);
1921   ierr = PetscFree(len_r);CHKERRQ(ierr);
1922   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
1923   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
1924   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
1925   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
1926   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
1927 
1928   /* attach the supporting struct to Cmpi for reuse */
1929   c = (Mat_MPIAIJ*)Cmpi->data;
1930   c->ap           = ptap;
1931   ptap->duplicate = Cmpi->ops->duplicate;
1932   ptap->destroy   = Cmpi->ops->destroy;
1933   ptap->view      = Cmpi->ops->view;
1934   ierr = PetscCalloc1(pN,&ptap->apa);CHKERRQ(ierr);
1935 
1936   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1937   Cmpi->assembled        = PETSC_FALSE;
1938   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1939   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1940   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1941   *C                     = Cmpi;
1942   PetscFunctionReturn(0);
1943 }
1944 
1945 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
1946 {
1947   PetscErrorCode    ierr;
1948   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
1949   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
1950   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
1951   Mat_APMPI         *ptap = c->ap;
1952   Mat               AP_loc,C_loc,C_oth;
1953   PetscInt          i,rstart,rend,cm,ncols,row;
1954   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
1955   PetscScalar       *apa;
1956   const PetscInt    *cols;
1957   const PetscScalar *vals;
1958 
1959   PetscFunctionBegin;
1960   if (!ptap) {
1961     MPI_Comm comm;
1962     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
1963     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
1964   }
1965 
1966   ierr = MatZeroEntries(C);CHKERRQ(ierr);
1967   /* 1) get R = Pd^T,Ro = Po^T */
1968   if (ptap->reuse == MAT_REUSE_MATRIX) {
1969     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1970     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1971   }
1972 
1973   /* 2) get AP_loc */
1974   AP_loc = ptap->AP_loc;
1975   ap = (Mat_SeqAIJ*)AP_loc->data;
1976 
1977   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1978   /*-----------------------------------------------------*/
1979   if (ptap->reuse == MAT_REUSE_MATRIX) {
1980     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
1981     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1982     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1983   }
1984 
1985   /* 2-2) compute numeric A_loc*P - dominating part */
1986   /* ---------------------------------------------- */
1987   /* get data from symbolic products */
1988   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1989   if (ptap->P_oth) {
1990     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1991   }
1992   apa   = ptap->apa;
1993   api   = ap->i;
1994   apj   = ap->j;
1995   for (i=0; i<am; i++) {
1996     /* AP[i,:] = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth */
1997     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
1998     apnz = api[i+1] - api[i];
1999     for (j=0; j<apnz; j++) {
2000       col = apj[j+api[i]];
2001       ap->a[j+ap->i[i]] = apa[col];
2002       apa[col] = 0.0;
2003     }
2004     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
2005   }
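  /* ap->a now holds the values of AP_loc = A_loc*P in the sparsity pattern fixed by the
   * symbolic phase; apa is a dense work array of length pN whose entries are zeroed again
   * as each row is harvested above */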
2006 
2007   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
2008   ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
2009   ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
2010   C_loc = ptap->C_loc;
2011   C_oth = ptap->C_oth;
2012 
2013   /* add C_loc and C_oth to C */
2014   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
2015 
2016   /* C_loc -> C */
2017   cm    = C_loc->rmap->N;
2018   c_seq = (Mat_SeqAIJ*)C_loc->data;
2019   cols = c_seq->j;
2020   vals = c_seq->a;
2021 
2023   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
2024   /* when there are no off-processor parts.  */
2025   /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
2026   /* is no longer true. Then the more complex function MatSetValues_MPIAIJ() has to be used, where the column index is looked up from */
2027   /* a table, and other, more complex stuff has to be done. */
2028   if (C->assembled) {
2029     C->was_assembled = PETSC_TRUE;
2030     C->assembled     = PETSC_FALSE;
2031   }
2032   if (C->was_assembled) {
2033     for (i=0; i<cm; i++) {
2034       ncols = c_seq->i[i+1] - c_seq->i[i];
2035       row = rstart + i;
2036       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2037       cols += ncols; vals += ncols;
2038     }
2039   } else {
2040     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
2041   }
2042 
2043   /* Co -> C, off-processor part */
2044   cm = C_oth->rmap->N;
2045   c_seq = (Mat_SeqAIJ*)C_oth->data;
2046   cols = c_seq->j;
2047   vals = c_seq->a;
2048   for (i=0; i<cm; i++) {
2049     ncols = c_seq->i[i+1] - c_seq->i[i];
2050     row = p->garray[i];
2051     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2052     cols += ncols; vals += ncols;
2053   }
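  /* Rows of C_oth belong to other processes (their global indices are p->garray[i]), so
   * they are added with MatSetValues() and ADD_VALUES and communicated during the
   * assembly calls below */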
2054 
2055   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2056   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2057 
2058   ptap->reuse = MAT_REUSE_MATRIX;
2059 
2060   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be recomputed from A and P */
2061   if (ptap->freestruct) {
2062     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
2063   }
2064   PetscFunctionReturn(0);
2065 }
2066