xref: /petsc/src/mat/impls/aij/mpi/mpiptap.c (revision fccb18fbb4825433b897d8c2cb64f0bcfabccc52)
1 
2 /*
3   Defines projective product routines where A is an MPIAIJ matrix
4           C = P^T * A * P
5 */
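
/*
  A minimal usage sketch (illustrative, not part of this file; A and P are
  assumed to be assembled MPIAIJ matrices with compatible layouts). The first
  call performs the symbolic and numeric product; the second reuses the cached
  intermediate structures when only the values, not the structure, of A or P
  have changed:

      Mat C;
      ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);
      ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);
      ierr = MatDestroy(&C);CHKERRQ(ierr);
*/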
6 
7 #include <../src/mat/impls/aij/seq/aij.h>   /*I "petscmat.h" I*/
8 #include <../src/mat/utils/freespace.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <petscbt.h>
11 #include <petsctime.h>
12 #include <petsc/private/hashmapiv.h>
13 #include <petsc/private/hashseti.h>
14 #include <petscsf.h>
15 
16 
17 PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
18 {
19   PetscErrorCode    ierr;
20   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
21   Mat_APMPI         *ptap=a->ap;
22   PetscBool         iascii;
23   PetscViewerFormat format;
24 
25   PetscFunctionBegin;
26   if (!ptap) {
27     /* hack: MatDuplicate() copies this ops->view to the new matrix, which is a base mat class with a null ptap! */
28     A->ops->view = MatView_MPIAIJ;
29     ierr = (A->ops->view)(A,viewer);CHKERRQ(ierr);
30     PetscFunctionReturn(0);
31   }
32 
33   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
34   if (iascii) {
35     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
36     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
37       if (ptap->algType == 0) {
38         ierr = PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");CHKERRQ(ierr);
39       } else if (ptap->algType == 1) {
40         ierr = PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");CHKERRQ(ierr);
41       } else if (ptap->algType == 2) {
42         ierr = PetscViewerASCIIPrintf(viewer,"using allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
43       } else if (ptap->algType == 3) {
44         ierr = PetscViewerASCIIPrintf(viewer,"using merged allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
45       }
46     }
47   }
48   ierr = (ptap->view)(A,viewer);CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
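
/*
  Sketch of triggering the INFO report above programmatically (illustrative;
  assumes C was produced by MatPtAP on communicator comm):

      PetscViewer viewer;
      ierr = PetscViewerASCIIGetStdout(comm,&viewer);CHKERRQ(ierr);
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      ierr = MatView(C,viewer);CHKERRQ(ierr);
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
*/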
51 
52 PetscErrorCode MatFreeIntermediateDataStructures_MPIAIJ_AP(Mat A)
53 {
54   PetscErrorCode      ierr;
55   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data;
56   Mat_APMPI           *ptap=a->ap;
57   Mat_Merge_SeqsToMPI *merge;
58 
59   PetscFunctionBegin;
60   if (!ptap) PetscFunctionReturn(0);
61 
62   ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
63   ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
64   ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
65   ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
66   ierr = MatDestroy(&ptap->A_loc);CHKERRQ(ierr); /* used by MatTransposeMatMult() */
67   ierr = MatDestroy(&ptap->Rd);CHKERRQ(ierr);
68   ierr = MatDestroy(&ptap->Ro);CHKERRQ(ierr);
69   if (ptap->AP_loc) { /* used by alg_rap */
70     Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
71     ierr = PetscFree(ap->i);CHKERRQ(ierr);
72     ierr = PetscFree2(ap->j,ap->a);CHKERRQ(ierr);
73     ierr = MatDestroy(&ptap->AP_loc);CHKERRQ(ierr);
74   } else { /* used by alg_ptap */
75     ierr = PetscFree(ptap->api);CHKERRQ(ierr);
76     ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
77   }
78   ierr = MatDestroy(&ptap->C_loc);CHKERRQ(ierr);
79   ierr = MatDestroy(&ptap->C_oth);CHKERRQ(ierr);
80   if (ptap->apa) {ierr = PetscFree(ptap->apa);CHKERRQ(ierr);}
81 
82   ierr = MatDestroy(&ptap->Pt);CHKERRQ(ierr);
83 
84   merge=ptap->merge;
85   if (merge) { /* used by alg_ptap */
86     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
87     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
88     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
89     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
90     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
91     ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
92     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
93     ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
94     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
95     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
96     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
97     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
98     ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
99     ierr = PetscFree(ptap->merge);CHKERRQ(ierr);
100   }
101   ierr = ISLocalToGlobalMappingDestroy(&ptap->ltog);CHKERRQ(ierr);
102 
103   ierr = PetscSFDestroy(&ptap->sf);CHKERRQ(ierr);
104   ierr = PetscFree(ptap->c_othi);CHKERRQ(ierr);
105   ierr = PetscFree(ptap->c_rmti);CHKERRQ(ierr);
106   PetscFunctionReturn(0);
107 }
108 
109 PetscErrorCode MatDestroy_MPIAIJ_PtAP(Mat A)
110 {
111   PetscErrorCode ierr;
112   Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
113   Mat_APMPI      *ptap=a->ap;
114 
115   PetscFunctionBegin;
116   ierr = (*A->ops->freeintermediatedatastructures)(A);CHKERRQ(ierr);
117   ierr = (*ptap->destroy)(A);CHKERRQ(ierr); /* MatDestroy_MPIAIJ(A) */
118   ierr = PetscFree(ptap);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 PETSC_INTERN PetscErrorCode MatPtAP_MPIAIJ_MPIAIJ(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
123 {
124   PetscErrorCode ierr;
125   PetscBool      flg;
126   MPI_Comm       comm;
127 #if !defined(PETSC_HAVE_HYPRE)
128   const char          *algTypes[4] = {"scalable","nonscalable","allatonce","allatonce_merged"};
129   PetscInt            nalg=4;
130 #else
131   const char          *algTypes[5] = {"scalable","nonscalable","allatonce","allatonce_merged","hypre"};
132   PetscInt            nalg=5;
133 #endif
134   PetscInt            pN=P->cmap->N,alg=1; /* set default algorithm */
135 
136   PetscFunctionBegin;
137   /* check if matrix local sizes are compatible */
138   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
139   if (A->rmap->rstart != P->rmap->rstart || A->rmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Arow (%D,%D) != Prow (%D,%D)",A->rmap->rstart,A->rmap->rend,P->rmap->rstart,P->rmap->rend);
140   if (A->cmap->rstart != P->rmap->rstart || A->cmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Acol (%D,%D) != Prow (%D,%D)",A->cmap->rstart,A->cmap->rend,P->rmap->rstart,P->rmap->rend);
141 
142   if (scall == MAT_INITIAL_MATRIX) {
143     /* pick an algorithm */
144     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
145     ierr = PetscOptionsEList("-matptap_via","Algorithmic approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
146     ierr = PetscOptionsEnd();CHKERRQ(ierr);
147 
148     if (!flg && pN > 100000) { /* may switch to scalable algorithm as default */
149       MatInfo     Ainfo,Pinfo;
150       PetscInt    nz_local;
151       PetscBool   alg_scalable_loc=PETSC_FALSE,alg_scalable;
152 
153       ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
154       ierr = MatGetInfo(P,MAT_LOCAL,&Pinfo);CHKERRQ(ierr);
155       nz_local = (PetscInt)(Ainfo.nz_allocated + Pinfo.nz_allocated);
156 
157       if (pN > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
158       ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);
159 
160       if (alg_scalable) {
161         alg = 0; /* the scalable algorithm can be ~50% slower than the nonscalable algorithm */
162       }
163     }
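    /* Worked example of the heuristic above (numbers are illustrative): when
       -matptap_via is not given, with fill = 2.0, a local nz_allocated(A) +
       nz_allocated(P) of 40000, and a global pN of 150000, the test
       150000 > 2.0*40000 holds on this rank, so after the allreduce every
       rank switches to the scalable algorithm (alg = 0). */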
164 
165     switch (alg) {
166     case 1:
167       /* do R=P^T locally, then C=R*A*P -- nonscalable */
168       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
169       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);CHKERRQ(ierr);
170       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
171       break;
172     case 2:
173       /* compute C=P^T*A*P allatonce */
174       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
175       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(A,P,fill,C);CHKERRQ(ierr);
176       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
177       break;
178     case 3:
179       /* compute C=P^T*A*P using the merged allatonce algorithm */
180       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
181       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(A,P,fill,C);CHKERRQ(ierr);
182       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
183       break;
184 #if defined(PETSC_HAVE_HYPRE)
185     case 4:
186       /* Use boomerAMGBuildCoarseOperator */
187       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
188       ierr = MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);CHKERRQ(ierr);
189       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
190       break;
191 #endif
192     default:
193       /* do R=P^T locally, then C=R*A*P */
194       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
195       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);CHKERRQ(ierr);
196       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
197       break;
198     }
199 
200     if (alg == 0 || alg == 1 || alg == 2 || alg == 3) {
201       Mat_MPIAIJ *c  = (Mat_MPIAIJ*)(*C)->data;
202       Mat_APMPI  *ap = c->ap;
203       ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)(*C)),((PetscObject)(*C))->prefix,"MatFreeIntermediateDataStructures","Mat");CHKERRQ(ierr);
204       ap->freestruct = PETSC_FALSE;
205       ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures", "MatFreeIntermediateDataStructures",ap->freestruct,&ap->freestruct, NULL);CHKERRQ(ierr);
206       ierr = PetscOptionsEnd();CHKERRQ(ierr);
207     }
208   }
209 
210   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
211   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
212   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
213   PetscFunctionReturn(0);
214 }
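
/*
  The algorithm above can also be forced from the options database (option
  names as registered above; illustrative command line):

      -matptap_via scalable | nonscalable | allatonce | allatonce_merged
      -mat_freeintermediatedatastructures    release the Mat_APMPI struct after the numeric phase
*/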
215 
216 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
217 {
218   PetscErrorCode    ierr;
219   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
220   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
221   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
222   Mat_APMPI         *ptap = c->ap;
223   Mat               AP_loc,C_loc,C_oth;
224   PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz,nout;
225   PetscScalar       *apa;
226   const PetscInt    *cols;
227   const PetscScalar *vals;
228 
229   PetscFunctionBegin;
230   if (!ptap->AP_loc) {
231     MPI_Comm comm;
232     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
233     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
234   }
235 
236   ierr = MatZeroEntries(C);CHKERRQ(ierr);
237 
238   /* 1) get R = Pd^T,Ro = Po^T */
239   if (ptap->reuse == MAT_REUSE_MATRIX) {
240     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
241     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
242   }
243 
244   /* 2) get AP_loc */
245   AP_loc = ptap->AP_loc;
246   ap = (Mat_SeqAIJ*)AP_loc->data;
247 
248   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
249   /*-----------------------------------------------------*/
250   if (ptap->reuse == MAT_REUSE_MATRIX) {
251     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
252     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
253     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
254   }
255 
256   /* 2-2) compute numeric A_loc*P - dominating part */
257   /* ---------------------------------------------- */
258   /* get data from symbolic products */
259   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
260   if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
261 
262   api   = ap->i;
263   apj   = ap->j;
264   ierr = ISLocalToGlobalMappingApply(ptap->ltog,api[AP_loc->rmap->n],apj,apj);CHKERRQ(ierr);
265   for (i=0; i<am; i++) {
266     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
267     apnz = api[i+1] - api[i];
268     apa = ap->a + api[i];
269     ierr = PetscMemzero(apa,sizeof(PetscScalar)*apnz);CHKERRQ(ierr);
270     AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
271     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
272   }
273   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,api[AP_loc->rmap->n],apj,&nout,apj);CHKERRQ(ierr);
274   if (api[AP_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",api[AP_loc->rmap->n],nout);
275 
276   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
277   /* Always use scalable version since we are in the MPI scalable version */
278   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
279   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
280 
281   C_loc = ptap->C_loc;
282   C_oth = ptap->C_oth;
283 
284   /* add C_loc and Co to C */
285   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
286 
287   /* C_loc -> C */
288   cm    = C_loc->rmap->N;
289   c_seq = (Mat_SeqAIJ*)C_loc->data;
290   cols = c_seq->j;
291   vals = c_seq->a;
292   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_loc->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
293 
294   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
295   /* when there are no off-processor parts. */
296   /* If was_assembled is true, the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
297   /* no longer yields the correct column index. The more general MatSetValues_MPIAIJ() must then be used; it looks up the column */
298   /* index in a table and performs additional bookkeeping. */
299   if (C->assembled) {
300     C->was_assembled = PETSC_TRUE;
301     C->assembled     = PETSC_FALSE;
302   }
303   if (C->was_assembled) {
304     for (i=0; i<cm; i++) {
305       ncols = c_seq->i[i+1] - c_seq->i[i];
306       row = rstart + i;
307       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
308       cols += ncols; vals += ncols;
309     }
310   } else {
311     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
312   }
313   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_loc->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
314   if (c_seq->i[C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);
315 
316   /* Co -> C, off-processor part */
317   cm = C_oth->rmap->N;
318   c_seq = (Mat_SeqAIJ*)C_oth->data;
319   cols = c_seq->j;
320   vals = c_seq->a;
321   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_oth->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
322   for (i=0; i<cm; i++) {
323     ncols = c_seq->i[i+1] - c_seq->i[i];
324     row = p->garray[i];
325     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
326     cols += ncols; vals += ncols;
327   }
328   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
329   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
330 
331   ptap->reuse = MAT_REUSE_MATRIX;
332 
333   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_oth->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
334   if (c_seq->i[C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_oth->rmap->n],nout);
335 
336   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated again from A and P */
337   if (ptap->freestruct) {
338     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
339   }
340   PetscFunctionReturn(0);
341 }
342 
343 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat *C)
344 {
345   PetscErrorCode      ierr;
346   Mat_APMPI           *ptap;
347   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
348   MPI_Comm            comm;
349   PetscMPIInt         size,rank;
350   Mat                 Cmpi,P_loc,P_oth;
351   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
352   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
353   PetscInt            *lnk,i,k,pnz,row,nsend;
354   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
355   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
356   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
357   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
358   MPI_Request         *swaits,*rwaits;
359   MPI_Status          *sstatus,rstatus;
360   PetscLayout         rowmap;
361   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
362   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
363   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi,nout;
364   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
365   PetscScalar         *apv;
366   PetscTable          ta;
367   MatType             mtype;
368   const char          *prefix;
369 #if defined(PETSC_USE_INFO)
370   PetscReal           apfill;
371 #endif
372 
373   PetscFunctionBegin;
374   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
375   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
376   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
377 
378   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
379 
380   /* create symbolic parallel matrix Cmpi */
381   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
382   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
383   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
384 
385   /* create the struct Mat_APMPI and attach it to C later */
386   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
387   ptap->reuse = MAT_INITIAL_MATRIX;
388   ptap->algType = 0;
389 
390   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
391   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);CHKERRQ(ierr);
392   /* get P_loc by taking all local rows of P */
393   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);CHKERRQ(ierr);
394 
395   ptap->P_loc = P_loc;
396   ptap->P_oth = P_oth;
397 
398   /* (0) compute Rd = Pd^T, Ro = Po^T  */
399   /* --------------------------------- */
400   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
401   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
402 
403   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
404   /* ----------------------------------------------------------------- */
405   p_loc  = (Mat_SeqAIJ*)P_loc->data;
406   if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;
407 
408   /* create and initialize a linked list */
409   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
410   MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
411   MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
412   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
413 
414   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
415 
416   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
417   if (ao) {
418     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
419   } else {
420     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
421   }
422   current_space = free_space;
423   nspacedouble  = 0;
424 
425   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
426   api[0] = 0;
427   for (i=0; i<am; i++) {
428     /* diagonal portion: Ad[i,:]*P */
429     ai = ad->i; pi = p_loc->i;
430     nzi = ai[i+1] - ai[i];
431     aj  = ad->j + ai[i];
432     for (j=0; j<nzi; j++) {
433       row  = aj[j];
434       pnz  = pi[row+1] - pi[row];
435       Jptr = p_loc->j + pi[row];
436       /* add non-zero cols of P into the sorted linked list lnk */
437       ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
438     }
439     /* off-diagonal portion: Ao[i,:]*P */
440     if (ao) {
441       ai = ao->i; pi = p_oth->i;
442       nzi = ai[i+1] - ai[i];
443       aj  = ao->j + ai[i];
444       for (j=0; j<nzi; j++) {
445         row  = aj[j];
446         pnz  = pi[row+1] - pi[row];
447         Jptr = p_oth->j + pi[row];
448         ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
449       }
450     }
451     apnz     = lnk[0];
452     api[i+1] = api[i] + apnz;
453 
454     /* if free space is not available, double the total space in the list */
455     if (current_space->local_remaining<apnz) {
456       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
457       nspacedouble++;
458     }
459 
460     /* Copy data into free space, then initialize lnk */
461     ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);
462 
463     current_space->array           += apnz;
464     current_space->local_used      += apnz;
465     current_space->local_remaining -= apnz;
466   }
467   /* Allocate space for apj and apv, initialize apj, and */
468   /* destroy list of free space and other temporary array(s) */
469   ierr = PetscCalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
470   ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
471   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
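  /* A condensed restatement of the PetscLLCondensed_Scalable life cycle used
     above (a summary of the calls above; names match the code):

         PetscLLCondensedCreate_Scalable(Crmax,&lnk)      once, sized by the largest mergeable row
         per row i: AddSorted_Scalable(pnz,Jptr,lnk)      merge each contributing P row, kept sorted and unique
                    apnz = lnk[0]                         count of distinct columns collected
                    Clean_Scalable(apnz,space,lnk)        copy the columns out and reset the list
         PetscLLCondensedDestroy_Scalable(lnk)            once */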
472 
473   /* Create AP_loc for reuse */
474   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
475   ierr = MatSeqAIJCompactOutExtraColumns_SeqAIJ(ptap->AP_loc, &ptap->ltog);CHKERRQ(ierr);
476 
477 #if defined(PETSC_USE_INFO)
478   if (ao) {
479     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
480   } else {
481     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
482   }
483   ptap->AP_loc->info.mallocs           = nspacedouble;
484   ptap->AP_loc->info.fill_ratio_given  = fill;
485   ptap->AP_loc->info.fill_ratio_needed = apfill;
486 
487   if (api[am]) {
488     ierr = PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
489     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
490   } else {
491     ierr = PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty\n");CHKERRQ(ierr);
492   }
493 #endif
494 
495   /* (2-1) compute symbolic Co = Ro*AP_loc  */
496   /* ------------------------------------ */
497   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
498   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
499   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
500   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
501 
502   /* (3) send coj of C_oth to other processors  */
503   /* ------------------------------------------ */
504   /* determine row ownership */
505   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
506   rowmap->n  = pn;
507   rowmap->bs = 1;
508   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
509   owners = rowmap->range;
510 
511   /* determine the number of messages to send, their lengths */
512   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
513   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
514   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
515 
516   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
517   coi   = c_oth->i; coj = c_oth->j;
518   con   = ptap->C_oth->rmap->n;
519   proc  = 0;
520   ierr = ISLocalToGlobalMappingApply(ptap->ltog,coi[con],coj,coj);CHKERRQ(ierr);
521   for (i=0; i<con; i++) {
522     while (prmap[i] >= owners[proc+1]) proc++;
523     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
524     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
525   }
526 
527   len          = 0; /* max length of buf_si[], see (4) */
528   owners_co[0] = 0;
529   nsend        = 0;
530   for (proc=0; proc<size; proc++) {
531     owners_co[proc+1] = owners_co[proc] + len_si[proc];
532     if (len_s[proc]) {
533       nsend++;
534       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
535       len         += len_si[proc];
536     }
537   }
538 
539   /* determine the number and length of messages to receive for coi and coj  */
540   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
541   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
542 
543   /* post the Irecv and Isend of coj */
544   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
545   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
546   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
547   for (proc=0, k=0; proc<size; proc++) {
548     if (!len_s[proc]) continue;
549     i    = owners_co[proc];
550     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
551     k++;
552   }
553 
554   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
555   /* ---------------------------------------- */
556   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
557   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
558   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
559   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
560   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,c_loc->j);CHKERRQ(ierr);
561 
562   /* receives of coj are complete */
563   for (i=0; i<nrecv; i++) {
564     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
565   }
566   ierr = PetscFree(rwaits);CHKERRQ(ierr);
567   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
568 
569   /* add received column indices into ta to update Crmax */
570   for (k=0; k<nrecv; k++) {/* k-th received message */
571     Jptr = buf_rj[k];
572     for (j=0; j<len_r[k]; j++) {
573       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
574     }
575   }
576   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
577   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
578 
579   /* (4) send and recv coi */
580   /*-----------------------*/
581   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
582   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
583   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
584   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
585   for (proc=0,k=0; proc<size; proc++) {
586     if (!len_s[proc]) continue;
587     /* form outgoing message for i-structure:
588          buf_si[0]:                 nrows to be sent
589                [1:nrows]:           row index (global)
590                [nrows+1:2*nrows+1]: i-structure index
591     */
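    /* Worked example (illustrative): sending two rows with local indices 3
       and 7 carrying 4 and 2 nonzeros gives nrows = 2 and
           buf_si = { 2, 3, 7, 0, 4, 6 }
       i.e. the header, the two row indices, then the CSR-style offsets. */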
592     /*-------------------------------------------*/
593     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
594     buf_si_i    = buf_si + nrows+1;
595     buf_si[0]   = nrows;
596     buf_si_i[0] = 0;
597     nrows       = 0;
598     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
599       nzi = coi[i+1] - coi[i];
600       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
601       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
602       nrows++;
603     }
604     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
605     k++;
606     buf_si += len_si[proc];
607   }
608   for (i=0; i<nrecv; i++) {
609     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
610   }
611   ierr = PetscFree(rwaits);CHKERRQ(ierr);
612   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
613 
614   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
615   ierr = PetscFree(len_ri);CHKERRQ(ierr);
616   ierr = PetscFree(swaits);CHKERRQ(ierr);
617   ierr = PetscFree(buf_s);CHKERRQ(ierr);
618 
619   /* (5) compute the local portion of Cmpi      */
620   /* ------------------------------------------ */
621   /* set initial free space to be Crmax, sufficient for holding the nonzeros in each row of Cmpi */
622   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
623   current_space = free_space;
624 
625   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
626   for (k=0; k<nrecv; k++) {
627     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
628     nrows       = *buf_ri_k[k];
629     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
630     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
631   }
632 
633   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
634   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
635   for (i=0; i<pn; i++) {
636     /* add C_loc into Cmpi */
637     nzi  = c_loc->i[i+1] - c_loc->i[i];
638     Jptr = c_loc->j + c_loc->i[i];
639     ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
640 
641     /* add received col data into lnk */
642     for (k=0; k<nrecv; k++) { /* k-th received message */
643       if (i == *nextrow[k]) { /* i-th row */
644         nzi  = *(nextci[k]+1) - *nextci[k];
645         Jptr = buf_rj[k] + *nextci[k];
646         ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
647         nextrow[k]++; nextci[k]++;
648       }
649     }
650     nzi = lnk[0];
651 
652     /* copy data into free space, then initialize lnk */
653     ierr = PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);CHKERRQ(ierr);
654     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
655   }
656   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
657   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
658   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
659 
660   /* local sizes and preallocation */
661   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
662   if (P->cmap->bs > 0) {
663     ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
664     ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
665   }
666   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
667   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
668 
669   /* free arrays corresponding to the members of a Mat_Merge_SeqsToMPI struct */
670   ierr = PetscFree(id_r);CHKERRQ(ierr);
671   ierr = PetscFree(len_r);CHKERRQ(ierr);
672   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
673   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
674   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
675   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
676   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
677 
678   /* attach the supporting struct to Cmpi for reuse */
679   c = (Mat_MPIAIJ*)Cmpi->data;
680   c->ap           = ptap;
681   ptap->duplicate = Cmpi->ops->duplicate;
682   ptap->destroy   = Cmpi->ops->destroy;
683   ptap->view      = Cmpi->ops->view;
684 
685   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
686   Cmpi->assembled        = PETSC_FALSE;
687   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
688   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
689   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
690   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
691   *C                     = Cmpi;
692 
693   nout = 0;
694   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_oth->i[ptap->C_oth->rmap->n],c_oth->j,&nout,c_oth->j);CHKERRQ(ierr);
695   if (c_oth->i[ptap->C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_oth->i[ptap->C_oth->rmap->n],nout);
696   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,&nout,c_loc->j);CHKERRQ(ierr);
697   if (c_loc->i[ptap->C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_loc->i[ptap->C_loc->rmap->n],nout);
698 
699   PetscFunctionReturn(0);
700 }
701 
702 PETSC_STATIC_INLINE PetscErrorCode MatPtAPSymbolicComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,PetscInt i,PetscHSetI dht,PetscHSetI oht)
703 {
704   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
705   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
706   PetscInt             *ai,nzi,j,*aj,row,col,*pi,*pj,pnz,nzpi,*p_othcols,k;
707   PetscInt             pcstart,pcend,column;
708   PetscErrorCode       ierr;
709 
710   PetscFunctionBegin;
711 
712   pcstart = P->cmap->rstart;
713   pcend   = P->cmap->rend;
714   /* diagonal portion: Ad[i,:]*P */
715   ai = ad->i;
716   nzi = ai[i+1] - ai[i];
717   aj  = ad->j + ai[i];
718   for (j=0; j<nzi; j++) {
719     row  = aj[j];
720     nzpi = pd->i[row+1] - pd->i[row];
721     pj  = pd->j + pd->i[row];
722     for (k=0; k<nzpi; k++) {
723       ierr = PetscHSetIAdd(dht,pj[k]+pcstart);CHKERRQ(ierr);
724     }
725   }
726   for (j=0; j<nzi; j++) {
727     row  = aj[j];
728     nzpi = po->i[row+1] - po->i[row];
729     pj  = po->j + po->i[row];
730     for (k=0; k<nzpi; k++) {
731       ierr = PetscHSetIAdd(oht,p->garray[pj[k]]);CHKERRQ(ierr);
732     }
733   }
734 
735   /* off diagonal part: Ao[i, :]*P_oth */
736   if (ao) {
737     ai = ao->i;
738     pi = p_oth->i;
739     nzi = ai[i+1] - ai[i];
740     aj  = ao->j + ai[i];
741     for (j=0; j<nzi; j++) {
742       row  = aj[j];
743       pnz  = pi[row+1] - pi[row];
744       p_othcols = p_oth->j + pi[row];
745       for (col=0; col<pnz; col++) {
746         column = p_othcols[col];
747         if (column>=pcstart && column<pcend) {
748           ierr = PetscHSetIAdd(dht,column);CHKERRQ(ierr);
749         } else {
750           ierr = PetscHSetIAdd(oht,column);CHKERRQ(ierr);
751         }
752       }
753     }
754   } /* end if (ao) */
755   PetscFunctionReturn(0);
756 }
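
/*
  Sketch of how the two hash sets filled above are consumed by the symbolic
  routines below (names are illustrative; cols must have room for htsize
  entries):

      PetscInt htsize,off = 0;
      ierr = PetscHSetIGetSize(dht,&htsize);CHKERRQ(ierr);
      ierr = PetscHSetIGetElems(dht,&off,cols);CHKERRQ(ierr);
      ierr = PetscHSetIClear(dht);CHKERRQ(ierr);
*/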
757 
758 PETSC_STATIC_INLINE PetscErrorCode MatPtAPNumericComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,PetscInt i,PetscHMapIV hmap)
759 {
760   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
761   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
762   PetscInt             *ai,nzi,j,*aj,row,col,*pi,pnz,*p_othcols,pcstart,*pj,k,nzpi;
763   PetscScalar          ra,*aa,*pa;
764   PetscLogDouble       flops=0.0;
765   PetscErrorCode       ierr;
766 
767   PetscFunctionBegin;
768   pcstart = P->cmap->rstart;
769   /* diagonal portion: Ad[i,:]*P */
770   ai  = ad->i;
771   nzi = ai[i+1] - ai[i];
772   aj  = ad->j + ai[i];
773   aa  = ad->a + ai[i];
774 
775   for (j=0; j<nzi; j++) {
776     ra   = aa[j];
777     row  = aj[j];
778     nzpi = pd->i[row+1] - pd->i[row];
779     pj = pd->j + pd->i[row];
780     pa = pd->a + pd->i[row];
781     for (k=0; k<nzpi; k++) {
782       ierr = PetscHMapIVAddValue(hmap,pj[k]+pcstart,ra*pa[k]);CHKERRQ(ierr);
783     }
784     flops += nzpi;
785   }
786   for (j=0; j<nzi; j++) {
787     ra   = aa[j];
788     row  = aj[j];
789     nzpi = po->i[row+1] - po->i[row];
790     pj = po->j + po->i[row];
791     pa = po->a + po->i[row];
792     for (k=0; k<nzpi; k++) {
793       ierr = PetscHMapIVAddValue(hmap,p->garray[pj[k]],ra*pa[k]);CHKERRQ(ierr);
794     }
795     flops += nzpi;
796   }
797 
798 
799   /* off diagonal part: Ao[i, :]*P_oth */
800   if (ao) {
801     ai = ao->i;
802     pi = p_oth->i;
803     nzi = ai[i+1] - ai[i];
804     aj  = ao->j + ai[i];
805     aa  = ao->a + ai[i];
806     for (j=0; j<nzi; j++) {
807       row  = aj[j];
808       ra   = aa[j];
809       pnz  = pi[row+1] - pi[row];
810       p_othcols = p_oth->j + pi[row];
811       pa   = p_oth->a + pi[row];
812       for (col=0; col<pnz; col++) {
813         ierr = PetscHMapIVAddValue(hmap,p_othcols[col],ra*pa[col]);CHKERRQ(ierr);
814       }
815       flops += pnz;
816     }
817   } /* end if (ao) */
818 
819   ierr = PetscLogFlops(flops);CHKERRQ(ierr);
820   PetscFunctionReturn(0);
821 }
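
/*
  The hash map accumulates one sparse row of AP: key = global column index,
  value = running sum. A condensed sketch of the life cycle used by the
  numeric routines below (every call appears in this file):

      ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
      ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);           reserve the largest expected row
      ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);                  per row
      ierr = PetscHMapIVAddValue(hmap,col,val);CHKERRQ(ierr);       insert or accumulate
      ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
      ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
*/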
822 
823 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,Mat C)
824 {
825   PetscErrorCode    ierr;
826   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
827   Mat_SeqAIJ        *cd,*co,*po,*pd;
828   Mat_APMPI         *ptap = c->ap;
829   PetscHMapIV       hmap;
830   PetscInt          i,j,jj,kk,nzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,ccstart,ccend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,*dcc,*occ,loc;
831   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
832   PetscLogDouble    flops=0.0;
833   MPI_Comm          comm;
834 
835   PetscFunctionBegin;
836   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
837   if (!ptap->P_oth) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
838 
839   ierr = MatZeroEntries(C);CHKERRQ(ierr);
840 
841   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
842   /*-----------------------------------------------------*/
843   if (ptap->reuse == MAT_REUSE_MATRIX) {
844     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
845     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
846   }
847 
848   po = (Mat_SeqAIJ*) p->B->data;
849   pd = (Mat_SeqAIJ*) p->A->data;
850 
851   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
852 
853   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
854   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
855   cmaxr = 0;
856   for (i=0; i<pon; i++) {
857     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
858   }
859   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
860   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
861   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
862   for (i=0; i<am && pon; i++) {
863     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
864     nzi = po->i[i+1] - po->i[i];
865     if (!nzi) continue;
866     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
867     voff = 0;
868     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
869     if (!voff) continue;
870     /*ierr = PetscMemzero(c_rmtc,sizeof(PetscInt)*pon);CHKERRQ(ierr);*/
871 
872     /* Form C(ii, :) */
873     poj = po->j + po->i[i];
874     poa = po->a + po->i[i];
875     for (j=0; j<nzi; j++) {
876       c_rmtjj = c_rmtj + ptap->c_rmti[poj[j]];
877       c_rmtaa = c_rmta + ptap->c_rmti[poj[j]];
878       for (jj=0; jj<voff; jj++) {
879         apvaluestmp[jj] = apvalues[jj]*poa[j];
880         /* If the row is empty */
881         if (!c_rmtc[poj[j]]) {
882           c_rmtjj[jj] = apindices[jj];
883           c_rmtaa[jj] = apvaluestmp[jj];
884           c_rmtc[poj[j]]++;
885         } else {
886           ierr = PetscFindInt(apindices[jj],c_rmtc[poj[j]],c_rmtjj,&loc);CHKERRQ(ierr);
887           if (loc>=0){ /* hit */
888             c_rmtaa[loc] += apvaluestmp[jj];
889           } else { /* new element */
890             loc = -(loc+1);
891             /* Move data backward */
892             for (kk=c_rmtc[poj[j]]; kk>loc; kk--) {
893               c_rmtjj[kk] = c_rmtjj[kk-1];
894               c_rmtaa[kk] = c_rmtaa[kk-1];
895             }/* End kk */
896             c_rmtjj[loc] = apindices[jj];
897             c_rmtaa[loc] = apvaluestmp[jj];
898             c_rmtc[poj[j]]++;
899           }
900         }
901         flops += voff;
902       } /* End jj */
903     } /* End j */
904   } /* End i */
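  /* Worked example of the PetscFindInt convention used above (illustrative):
     searching for column 5 in the sorted row {2,4,8} returns loc = -3, i.e.
     -(insertion_point+1); the code recovers loc = -(loc+1) = 2, shifts {8}
     one slot to the right, and stores 5 there, giving {2,4,5,8}. */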
905 
906   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
      ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr); /* destroy before the map is re-created below for the local part, otherwise it leaks */
907 
908   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
909   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
910 
911   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
912   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
913   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
914   cd = (Mat_SeqAIJ*)(c->A)->data;
915   co = (Mat_SeqAIJ*)(c->B)->data;
916 
917   cmaxr = 0;
918   for (i=0; i<pn; i++) {
919     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
920   }
921   ierr = PetscCalloc5(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pn,&dcc,pn,&occ);CHKERRQ(ierr);
922   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
923   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
924   for (i=0; i<am && pn; i++) {
925     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
926     nzi = pd->i[i+1] - pd->i[i];
927     if (!nzi) continue;
928     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
929     voff = 0;
930     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
931     if (!voff) continue;
932     /* Form C(ii, :) */
933     pdj = pd->j + pd->i[i];
934     pda = pd->a + pd->i[i];
935     for (j=0; j<nzi; j++) {
936       row = pcstart + pdj[j];
937       for (jj=0; jj<voff; jj++) {
938         apvaluestmp[jj] = apvalues[jj]*pda[j];
939       }
940       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
941       flops += voff;
942     }
943   }
944 
945   ierr = MatGetOwnershipRangeColumn(C,&ccstart,&ccend);CHKERRQ(ierr);
946   ierr = PetscFree5(apindices,apvalues,apvaluestmp,dcc,occ);CHKERRQ(ierr);
947   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
948   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPI_SUM);CHKERRQ(ierr);
949   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
950   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
951 
952   /* Add contributions from remote */
953   for (i = 0; i < pn; i++) {
954     row = i + pcstart;
955     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
956   }
957   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
958 
959   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
960   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
961   ierr = PetscLogFlops(flops);CHKERRQ(ierr);
962 
963   ptap->reuse = MAT_REUSE_MATRIX;
964 
965   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated again from A and P */
966   if (ptap->freestruct) {
967     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
968   }
969   PetscFunctionReturn(0);
970 }
971 
972 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,Mat C)
973 {
974   PetscErrorCode    ierr;
975   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
976   Mat_SeqAIJ        *cd,*co,*po,*pd;
977   Mat_APMPI         *ptap = c->ap;
978   PetscHMapIV       hmap;
979   PetscInt          i,j,jj,kk,nzi,dnzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,loc;
980   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
981   PetscLogDouble    flops=0.0;
982   MPI_Comm          comm;
983 
984   PetscFunctionBegin;
985   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
986   if (!ptap->P_oth) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
987 
988   ierr = MatZeroEntries(C);CHKERRQ(ierr);
989 
990   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
991   /*-----------------------------------------------------*/
992   if (ptap->reuse == MAT_REUSE_MATRIX) {
993     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
994     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
995   }
996 
997   po = (Mat_SeqAIJ*) p->B->data;
998   pd = (Mat_SeqAIJ*) p->A->data;
999 
1000   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1001   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1002 
1003   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
1004   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1005   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1006   cmaxr = 0;
1007   for (i=0; i<pon; i++) {
1008     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
1009   }
1010   cd = (Mat_SeqAIJ*)(c->A)->data;
1011   co = (Mat_SeqAIJ*)(c->B)->data;
1012   for (i=0; i<pn; i++) {
1013     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
1014   }
1015   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
1016   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
1017   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
1018   for (i=0; i<am && (pon || pn); i++) {
1019     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
1020     nzi  = po->i[i+1] - po->i[i];
1021     dnzi = pd->i[i+1] - pd->i[i];
1022     if (!nzi && !dnzi) continue;
1023     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
1024     voff = 0;
1025     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
1026     if (!voff) continue;
1027 
1028     /* Form remote C(ii, :) */
1029     poj = po->j + po->i[i];
1030     poa = po->a + po->i[i];
1031     for (j=0; j<nzi; j++) {
1032       c_rmtjj = c_rmtj + ptap->c_rmti[poj[j]];
1033       c_rmtaa = c_rmta + ptap->c_rmti[poj[j]];
1034       for (jj=0; jj<voff; jj++) {
1035         apvaluestmp[jj] = apvalues[jj]*poa[j];
1036         /* If the row is empty */
1037         if (!c_rmtc[poj[j]]) {
1038           c_rmtjj[jj] = apindices[jj];
1039           c_rmtaa[jj] = apvaluestmp[jj];
1040           c_rmtc[poj[j]]++;
1041         } else {
1042           ierr = PetscFindInt(apindices[jj],c_rmtc[poj[j]],c_rmtjj,&loc);CHKERRQ(ierr);
1043           if (loc>=0){ /* hit */
1044             c_rmtaa[loc] += apvaluestmp[jj];
1045           } else { /* new element */
1046             loc = -(loc+1);
1047             /* Move data backward */
1048             for (kk=c_rmtc[poj[j]]; kk>loc; kk--) {
1049               c_rmtjj[kk] = c_rmtjj[kk-1];
1050               c_rmtaa[kk] = c_rmtaa[kk-1];
1051             }/* End kk */
1052             c_rmtjj[loc] = apindices[jj];
1053             c_rmtaa[loc] = apvaluestmp[jj];
1054             c_rmtc[poj[j]]++;
1055           }
1056         }
1057       } /* End jj */
1058       flops += voff;
1059     } /* End j */
1060 
1061     /* Form local C(ii, :) */
1062     pdj = pd->j + pd->i[i];
1063     pda = pd->a + pd->i[i];
1064     for (j=0; j<dnzi; j++) {
1065       row = pcstart + pdj[j];
1066       for (jj=0; jj<voff; jj++) {
1067         apvaluestmp[jj] = apvalues[jj]*pda[j];
1068       }/* End jj */
1069       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
1070       flops += voff;
1071     }/* End j */
1072   } /* End i */
1073 
1074   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
1075   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
1076   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
1077 
1078   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1079   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1080   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr); /* op must match the MPIU_REPLACE used in the corresponding PetscSFReduceBegin() */
1081   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1082   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
1083 
1084   /* Add contributions from remote */
1085   for (i = 0; i < pn; i++) {
1086     row = i + pcstart;
1087     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
1088   }
1089   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
1090 
1091   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1092   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1093   ierr = PetscLogFlops(flops);CHKERRQ(ierr);
1094 
1095   ptap->reuse = MAT_REUSE_MATRIX;
1096 
1097   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated again from A and P */
1098   if (ptap->freestruct) {
1099     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
1100   }
1101   PetscFunctionReturn(0);
1102 }
1103 
1104 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,PetscReal fill,Mat *C)
1105 {
1106   Mat_APMPI           *ptap;
1107   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1108   MPI_Comm            comm;
1109   Mat                 Cmpi;
1110   Mat_SeqAIJ          *pd,*po;
1111   MatType             mtype;
1112   PetscSF             sf;
1113   PetscSFNode         *iremote;
1114   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1115   const PetscInt      *rootdegrees;
1116   PetscHSetI          ht,oht,*hta,*hto;
1117   PetscInt            pn,pon,*c_rmtc,i,j,nzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1118   PetscInt            owner,lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1119   PetscInt            nalg=2,alg=0;
1120   PetscBool           flg;
1121   const char          *algTypes[2] = {"overlapping","merged"};
1122   PetscErrorCode      ierr;
1123 
1124   PetscFunctionBegin;
1125   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1126 
1127   /* Create symbolic parallel matrix Cmpi */
1128   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1129   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1130   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1131   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1132   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1133 
1134   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1135   ptap->reuse = MAT_INITIAL_MATRIX;
1136   ptap->algType = 2;
1137 
1138   /* Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1139   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1140 
1141   po = (Mat_SeqAIJ*)p->B->data;
1142   pd = (Mat_SeqAIJ*)p->A->data;
1143 
1144   /* This equals the number of off-diagonal columns of P */
1145   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1146   /* offsets */
1147   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1148   /* The number of columns we will send to remote ranks */
1149   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1150   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1151   for (i=0; i<pon; i++) {
1152     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1153   }
1154   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1155   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1156   /* Create hash table to merge all columns for C(i, :) */
1157   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1158 
1159   ptap->c_rmti[0] = 0;
1160   /* 2) Pass 1: calculate the size for C_rmt (a matrix that needs to be sent to other processors) */
1161   for (i=0; i<am && pon; i++) {
1162     /* Form one row of AP */
1163     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1164     /* If the off-diagonal part is empty, we do not need any computation */
1165     nzi = po->i[i+1] - po->i[i];
1166     if (!nzi) continue;
1167 
1168     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,ht);CHKERRQ(ierr);
1169     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1170     /* If AP is empty, just continue */
1171     if (!htsize) continue;
1172     /* Form C(ii, :) */
1173     poj = po->j + po->i[i];
1174     for (j=0; j<nzi; j++) {
1175       ierr = PetscHSetIUpdate(hta[poj[j]],ht);CHKERRQ(ierr);
1176     }
1177   }
1178 
1179   for (i=0; i<pon; i++) {
1180     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1181     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1182     c_rmtc[i] = htsize;
1183   }
1184 
1185   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1186 
1187   for (i=0; i<pon; i++) {
1188     off = 0;
1189     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1190     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1191   }
1192   ierr = PetscFree(hta);CHKERRQ(ierr);
1193 
1194   ierr = PetscCalloc1(pon,&iremote);CHKERRQ(ierr);
1195   for (i=0; i<pon; i++) {
1196     owner = 0; lidx = 0;
1197     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1198     iremote[i].index = lidx;
1199     iremote[i].rank  = owner;
1200   }
1201 
1202   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1203   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1204   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1205   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1206   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1207   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
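  /* Sketch of the star forest just set up (illustrative): each of the pon
     off-diagonal columns of P is a leaf pointing at the owning rank and the
     local row index of global column p->garray[i],

         iremote[i].rank  = owner of p->garray[i]
         iremote[i].index = row index local to that owner

     so the gather/scatter below move per-row counts between leaves and roots. */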
1208   /* How many neighbors have contributions to my rows? */
1209   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1210   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1211   rootspacesize = 0;
1212   for (i = 0; i < pn; i++) {
1213     rootspacesize += rootdegrees[i];
1214   }
1215   ierr = PetscCalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1216   ierr = PetscCalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1217   /* Get information from the leaves:
1218    * the number of columns other ranks contribute to my rows
1219    */
1220   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1221   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1222   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1223   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1224   /* The number of columns received for each row */
1225   ptap->c_othi[0] = 0;
1226   rootspacesize = 0;
1227   rootspaceoffsets[0] = 0;
1228   for (i = 0; i < pn; i++) {
1229     rcvncols = 0;
1230     for (j = 0; j<rootdegrees[i]; j++) {
1231       rcvncols += rootspace[rootspacesize];
1232       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1233       rootspacesize++;
1234     }
1235     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1236   }
1237   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1238 
1239   ierr = PetscCalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1240   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1241   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1242   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1243   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
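  /* Worked example (illustrative): if local row 0 receives contributions from
     two senders with 3 and 5 columns and row 1 from one sender with 4, then
     rootspace = {3,5,4}, c_othi = {0,8,12}, and rootspaceoffsets = {0,3,8,12},
     which was just scattered back to give each sender its write offset. */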
1244 
1245   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1246   nleaves = 0;
1247   for (i = 0; i<pon; i++) {
1248     owner = 0; lidx = 0;
1249     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1250     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1251     for (j=0; j<sendncols; j++) {
1252       iremote[nleaves].rank = owner;
1253       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1254     }
1255   }
1256   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1257   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1258 
1259   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1260   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1261   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1262   /* One-to-one map: start shipping the remote column indices now so the communication overlaps with the local symbolic work below */
1263   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1264 
1265   ierr = PetscCalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1266   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1267   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1268   ierr = PetscMalloc2(pn,&hta,pn,&hto);CHKERRQ(ierr);
1269   for (i=0; i<pn; i++) {
1270     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1271     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1272   }
1273   /* Work on local part */
1274   /* 4) Pass 1: Estimate memory for C_loc */
1275   for (i=0; i<am && pn; i++) {
1276     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1277     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1278     nzi = pd->i[i+1] - pd->i[i];
1279     if (!nzi) continue;
1280 
1281     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,oht);CHKERRQ(ierr);
1282     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1283     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1284     if (!(htsize+htosize)) continue;
1285     /* Form C(i, :) */
1286     pdj = pd->j + pd->i[i];
1287     for (j=0; j<nzi; j++) {
1288       ierr = PetscHSetIUpdate(hta[pdj[j]],ht);CHKERRQ(ierr);
1289       ierr = PetscHSetIUpdate(hto[pdj[j]],oht);CHKERRQ(ierr);
1290     }
1291   }
1292 
1293   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1294   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1295 
1296   /* Get remote data */
1297   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1298   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1299 
1300   for (i = 0; i < pn; i++) {
1301     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1302     rdj = c_othj + ptap->c_othi[i];
1303     for (j = 0; j < nzi; j++) {
1304       col =  rdj[j];
1305       /* diag part */
1306       if (col>=pcstart && col<pcend) {
1307         ierr = PetscHSetIAdd(hta[i],col);CHKERRQ(ierr);
1308       } else { /* off diag */
1309         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1310       }
1311     }
1312     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1313     dnz[i] = htsize;
1314     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1315     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1316     onz[i] = htsize;
1317     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1318   }
1319 
1320   ierr = PetscFree2(hta,hto);CHKERRQ(ierr);
1321   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1322 
1323   /* local sizes and preallocation */
1324   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1325   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
1326   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1327   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1328 
1329   /* attach the supporting struct to Cmpi for reuse */
1330   c = (Mat_MPIAIJ*)Cmpi->data;
1331   c->ap           = ptap;
1332   ptap->duplicate = Cmpi->ops->duplicate;
1333   ptap->destroy   = Cmpi->ops->destroy;
1334   ptap->view      = Cmpi->ops->view;
1335 
1336   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1337   Cmpi->assembled        = PETSC_FALSE;
1338   /* pick an algorithm */
1339   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1340   alg = 0;
1341   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1342   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1343   switch (alg) {
1344     case 0:
1345       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1346       break;
1347     case 1:
1348       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1349       break;
1350     default:
1351       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1352   }
1353   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1354   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1355   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1356   *C                     = Cmpi;
1357   PetscFunctionReturn(0);
1358 }
1359 
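/* MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged - symbolic phase of C = P^T*A*P for the
   all-at-once merged algorithm (algType 3): a single pass over the rows of A builds per-row
   hash sets with the diagonal and off-diagonal column patterns of C, and the rows of C owned
   by other ranks are exchanged through a PetscSF before Cmpi is preallocated */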
1360 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,PetscReal fill,Mat *C)
1361 {
1362   Mat_APMPI           *ptap;
1363   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1364   MPI_Comm            comm;
1365   Mat                 Cmpi;
1366   Mat_SeqAIJ          *pd,*po;
1367   MatType             mtype;
1368   PetscSF             sf;
1369   PetscSFNode         *iremote;
1370   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1371   const PetscInt      *rootdegrees;
1372   PetscHSetI          ht,oht,*hta,*hto,*htd;
1373   PetscInt            pn,pon,*c_rmtc,i,j,nzi,dnzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1374   PetscInt            owner,lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1375   PetscInt            nalg=2,alg=0;
1376   PetscBool           flg;
1377   const char          *algTypes[2] = {"merged","overlapping"};
1378   PetscErrorCode      ierr;
1379 
1380   PetscFunctionBegin;
1381   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1382 
1383   /* Create symbolic parallel matrix Cmpi */
1384   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1385   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1386   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1387   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1388   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1389 
1390   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1391   ptap->reuse = MAT_INITIAL_MATRIX;
1392   ptap->algType = 3;
1393 
1394   /* 0) Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1395   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1396 
1397   po = (Mat_SeqAIJ*)p->B->data;
1398   pd = (Mat_SeqAIJ*)p->A->data;
1399 
1400   /* This equals the number of off-diagonal columns of P */
1401   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1402   /* offsets */
1403   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1404   /* The number of columns we will send to remote ranks */
1405   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1406   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1407   for (i=0; i<pon; i++) {
1408     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1409   }
1410   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1411   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1412   /* Create hash table to merge all columns for C(i, :) */
1413   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1414   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1415   ierr = PetscMalloc2(pn,&htd,pn,&hto);CHKERRQ(ierr);
1416   for (i=0; i<pn; i++) {
1417     ierr = PetscHSetICreate(&htd[i]);CHKERRQ(ierr);
1418     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1419   }
1420   ptap->c_rmti[0] = 0;
1421   /* 2) Pass 1: calculate the size of C_rmt (the part of C that must be sent to other ranks)  */
1422   for (i=0; i<am && (pon || pn); i++) {
1423     /* Form one row of AP */
1424     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1425     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1426     /* If row i of P is empty in both the diagonal and off-diagonal parts, skip it */
1427     nzi = po->i[i+1] - po->i[i];
1428     dnzi = pd->i[i+1] - pd->i[i];
1429     if (!nzi && !dnzi) continue;
1430 
1431     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,oht);CHKERRQ(ierr);
1432     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1433     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1434     /* If AP is empty, just continue */
1435     if (!(htsize+htosize)) continue;
1436 
1437     /* Form remote C(i, :) */
1438     poj = po->j + po->i[i];
1439     for (j=0; j<nzi; j++) {
1440       ierr = PetscHSetIUpdate(hta[poj[j]],ht);CHKERRQ(ierr);
1441       ierr = PetscHSetIUpdate(hta[poj[j]],oht);CHKERRQ(ierr);
1442     }
1443 
1444     /* Form local C(i, :) */
1445     pdj = pd->j + pd->i[i];
1446     for (j=0; j<dnzi; j++) {
1447       ierr = PetscHSetIUpdate(htd[pdj[j]],ht);CHKERRQ(ierr);
1448       ierr = PetscHSetIUpdate(hto[pdj[j]],oht);CHKERRQ(ierr);
1449     }
1450   }
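  /* At this point hta[k] holds the complete column pattern destined for the remote row of C
     that corresponds to off-process column k of P, while htd[i] and hto[i] hold the
     diagonal and off-diagonal patterns of local row i */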
1451 
1452   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1453   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1454 
1455   for (i=0; i<pon; i++) {
1456     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1457     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1458     c_rmtc[i] = htsize;
1459   }
1460 
1461   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1462 
1463   for (i=0; i<pon; i++) {
1464     off = 0;
1465     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1466     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1467   }
1468   ierr = PetscFree(hta);CHKERRQ(ierr);
1469 
1470   ierr = PetscCalloc1(pon,&iremote);CHKERRQ(ierr);
1471   for (i=0; i<pon; i++) {
1472     owner = 0; lidx = 0;
1473     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1474     iremote[i].index = lidx;
1475     iremote[i].rank  = owner;
1476   }
1477 
1478   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1479   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1480   /* Reorder ranks properly so that the data handled by gather and scatter are in the same order */
1481   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1482   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1483   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1484   /* How many neighbors have contributions to my rows? */
1485   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1486   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1487   rootspacesize = 0;
1488   for (i = 0; i < pn; i++) {
1489     rootspacesize += rootdegrees[i];
1490   }
1491   ierr = PetscCalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1492   ierr = PetscCalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1493   /* Get information from leaves:
1494    * the number of columns other ranks contribute to my rows
1495    */
1496   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1497   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1498   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1499   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1500   /* Accumulate, for each local row, the number of incoming columns from each contributing rank */
1501   ptap->c_othi[0] = 0;
1502   rootspacesize = 0;
1503   rootspaceoffsets[0] = 0;
1504   for (i = 0; i < pn; i++) {
1505     rcvncols = 0;
1506     for (j = 0; j<rootdegrees[i]; j++) {
1507       rcvncols += rootspace[rootspacesize];
1508       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1509       rootspacesize++;
1510     }
1511     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1512   }
1513   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1514 
1515   ierr = PetscCalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1516   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1517   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1518   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1519   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1520 
1521   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1522   nleaves = 0;
1523   for (i = 0; i<pon; i++) {
1524     owner = 0; lidx = 0;
1525     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1526     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1527     for (j=0; j<sendncols; j++) {
1528       iremote[nleaves].rank = owner;
1529       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1530     }
1531   }
1532   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1533   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1534 
1535   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1536   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1537   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1538   /* One-to-one map: ship the remote column indices */
1539   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1540   /* Get remote data */
1541   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1542   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1543   ierr = PetscCalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1544   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1545 
1546   for (i = 0; i < pn; i++) {
1547     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1548     rdj = c_othj + ptap->c_othi[i];
1549     for (j = 0; j < nzi; j++) {
1550       col =  rdj[j];
1551       /* diag part */
1552       if (col>=pcstart && col<pcend) {
1553         ierr = PetscHSetIAdd(htd[i],col);CHKERRQ(ierr);
1554       } else { /* off diag */
1555         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1556       }
1557     }
1558     ierr = PetscHSetIGetSize(htd[i],&htsize);CHKERRQ(ierr);
1559     dnz[i] = htsize;
1560     ierr = PetscHSetIDestroy(&htd[i]);CHKERRQ(ierr);
1561     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1562     onz[i] = htsize;
1563     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1564   }
1565 
1566   ierr = PetscFree2(htd,hto);CHKERRQ(ierr);
1567   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1568 
1569   /* local sizes and preallocation */
1570   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1571   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
1572   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1573   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1574 
1575   /* attach the supporting struct to Cmpi for reuse */
1576   c = (Mat_MPIAIJ*)Cmpi->data;
1577   c->ap           = ptap;
1578   ptap->duplicate = Cmpi->ops->duplicate;
1579   ptap->destroy   = Cmpi->ops->destroy;
1580   ptap->view      = Cmpi->ops->view;
1581 
1582   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1583   Cmpi->assembled        = PETSC_FALSE;
1584   /* pick an algorithm */
1585   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1586   alg = 0;
1587   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1588   ierr = PetscOptionsEnd();CHKERRQ(ierr);
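  /* e.g. running with -matptap_allatonce_via overlapping selects the non-merged numeric kernel */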
1589   switch (alg) {
1590     case 0:
1591       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1592       break;
1593     case 1:
1594       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1595       break;
1596     default:
1597       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1598   }
1599   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1600   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1601   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1602   *C                     = Cmpi;
1603   PetscFunctionReturn(0);
1604 }
1605 
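/* MatPtAPSymbolic_MPIAIJ_MPIAIJ - symbolic phase of the nonscalable algorithm (algType 1),
   typically reached through MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&C): AP_loc = A_loc*P is
   built explicitly with a condensed linked list, C_loc = Rd*AP_loc and C_oth = Ro*AP_loc are
   computed per rank, and the off-process rows in C_oth are merged into C with point-to-point
   MPI messages */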
1606 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat *C)
1607 {
1608   PetscErrorCode      ierr;
1609   Mat_APMPI           *ptap;
1610   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
1611   MPI_Comm            comm;
1612   PetscMPIInt         size,rank;
1613   Mat                 Cmpi;
1614   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
1615   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
1616   PetscInt            *lnk,i,k,pnz,row,nsend;
1617   PetscBT             lnkbt;
1618   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
1619   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
1620   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
1621   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1622   MPI_Request         *swaits,*rwaits;
1623   MPI_Status          *sstatus,rstatus;
1624   PetscLayout         rowmap;
1625   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
1626   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
1627   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
1628   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
1629   PetscScalar         *apv;
1630   PetscTable          ta;
1631   MatType             mtype;
1632   const char          *prefix;
1633 #if defined(PETSC_USE_INFO)
1634   PetscReal           apfill;
1635 #endif
1636 
1637   PetscFunctionBegin;
1638   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1639   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1640   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1641 
1642   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
1643 
1644   /* create symbolic parallel matrix Cmpi */
1645   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1646   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1647   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1648 
1649   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
1650   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;
1651 
1652   /* create struct Mat_APMPI and attach it to C later */
1653   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1654   ptap->reuse = MAT_INITIAL_MATRIX;
1655   ptap->algType = 1;
1656 
1657   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1658   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1659   /* get P_loc by taking all local rows of P */
1660   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1661 
1662   /* (0) compute Rd = Pd^T, Ro = Po^T  */
1663   /* --------------------------------- */
1664   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1665   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1666 
1667   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
1668   /* ----------------------------------------------------------------- */
1669   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1670   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1671 
1672   /* create and initialize a linked list */
1673   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
1674   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
1675   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
1676   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
1677   /* printf("[%d] est %d, Crmax %d; pN %d\n",rank,5*(p_loc->rmax+p_oth->rmax + (PetscInt)(1.e-2*pN)),Crmax,pN); */
1678 
1679   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
1680 
1681   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
1682   if (ao) {
1683     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
1684   } else {
1685     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
1686   }
1687   current_space = free_space;
1688   nspacedouble  = 0;
1689 
1690   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
1691   api[0] = 0;
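  /* api[] accumulates the CSR row offsets of AP_loc: after the loop, row i of AP occupies
     apj[api[i]..api[i+1]) */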
1692   for (i=0; i<am; i++) {
1693     /* diagonal portion: Ad[i,:]*P */
1694     ai = ad->i; pi = p_loc->i;
1695     nzi = ai[i+1] - ai[i];
1696     aj  = ad->j + ai[i];
1697     for (j=0; j<nzi; j++) {
1698       row  = aj[j];
1699       pnz  = pi[row+1] - pi[row];
1700       Jptr = p_loc->j + pi[row];
1701       /* add non-zero cols of P into the sorted linked list lnk */
1702       ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1703     }
1704     /* off-diagonal portion: Ao[i,:]*P */
1705     if (ao) {
1706       ai = ao->i; pi = p_oth->i;
1707       nzi = ai[i+1] - ai[i];
1708       aj  = ao->j + ai[i];
1709       for (j=0; j<nzi; j++) {
1710         row  = aj[j];
1711         pnz  = pi[row+1] - pi[row];
1712         Jptr = p_oth->j + pi[row];
1713         ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1714       }
1715     }
1716     apnz     = lnk[0];
1717     api[i+1] = api[i] + apnz;
1718     if (ap_rmax < apnz) ap_rmax = apnz;
1719 
1720     /* if free space is not available, double the total space in the list */
1721     if (current_space->local_remaining<apnz) {
1722       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
1723       nspacedouble++;
1724     }
1725 
1726     /* Copy data into free space, then initialize lnk */
1727     ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1728 
1729     current_space->array           += apnz;
1730     current_space->local_used      += apnz;
1731     current_space->local_remaining -= apnz;
1732   }
1733   /* Allocate space for apj and apv, initialize apj, and */
1734   /* destroy list of free space and other temporary array(s) */
1735   ierr   = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
1736   ierr   = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
1737   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1738 
1739   /* Create AP_loc for reuse */
1740   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
1741 
1742 #if defined(PETSC_USE_INFO)
1743   if (ao) {
1744     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
1745   } else {
1746     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
1747   }
1748   ptap->AP_loc->info.mallocs           = nspacedouble;
1749   ptap->AP_loc->info.fill_ratio_given  = fill;
1750   ptap->AP_loc->info.fill_ratio_needed = apfill;
1751 
1752   if (api[am]) {
1753     ierr = PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
1754     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
1755   } else {
1756     ierr = PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty\n");CHKERRQ(ierr);
1757   }
1758 #endif
1759 
1760   /* (2-1) compute symbolic Co = Ro*AP_loc  */
1761   /* ------------------------------------ */
1762   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1763   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
1764   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
1765   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
1766 
1767   /* (3) send coj of C_oth to other processors  */
1768   /* ------------------------------------------ */
1769   /* determine row ownership */
1770   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
1771   rowmap->n  = pn;
1772   rowmap->bs = 1;
1773   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
1774   owners = rowmap->range;
1775 
1776   /* determine the number of messages to send, their lengths */
1777   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
1778   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1779   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1780 
1781   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1782   coi   = c_oth->i; coj = c_oth->j;
1783   con   = ptap->C_oth->rmap->n;
1784   proc  = 0;
1785   for (i=0; i<con; i++) {
1786     while (prmap[i] >= owners[proc+1]) proc++;
1787     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
1788     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1789   }
1790 
1791   len          = 0; /* total length of buf_s[], see (4) */
1792   owners_co[0] = 0;
1793   nsend        = 0;
1794   for (proc=0; proc<size; proc++) {
1795     owners_co[proc+1] = owners_co[proc] + len_si[proc];
1796     if (len_s[proc]) {
1797       nsend++;
1798       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1799       len         += len_si[proc];
1800     }
1801   }
1802 
1803   /* determine the number and length of messages to receive for coi and coj  */
1804   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
1805   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
1806 
1807   /* post the Irecv and Isend of coj */
1808   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
1809   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
1810   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
1811   for (proc=0, k=0; proc<size; proc++) {
1812     if (!len_s[proc]) continue;
1813     i    = owners_co[proc];
1814     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
1815     k++;
1816   }
1817 
1818   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
1819   /* ---------------------------------------- */
1820   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
1821   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
1822   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
1823   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
1824 
1825   /* receives coj are complete */
1826   for (i=0; i<nrecv; i++) {
1827     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1828   }
1829   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1830   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1831 
1832   /* add received column indices into ta to update Crmax */
1833   for (k=0; k<nrecv; k++) {/* k-th received message */
1834     Jptr = buf_rj[k];
1835     for (j=0; j<len_r[k]; j++) {
1836       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
1837     }
1838   }
1839   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
1840   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
1841 
1842   /* (4) send and recv coi */
1843   /*-----------------------*/
1844   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
1845   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
1846   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
1847   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
1848   for (proc=0,k=0; proc<size; proc++) {
1849     if (!len_s[proc]) continue;
1850     /* form outgoing message for i-structure:
1851          buf_si[0]:                 nrows to be sent
1852                [1:nrows]:           row index (local to the receiving rank)
1853                [nrows+1:2*nrows+1]: i-structure index
1854     */
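    /* e.g. two rows with receiver-local indices 2 and 5 holding 3 and 1 nonzeros give
       buf_si = [2, 2,5, 0,3,4] and len_si = 2*(2+1) = 6 */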
1855     /*-------------------------------------------*/
1856     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1857     buf_si_i    = buf_si + nrows+1;
1858     buf_si[0]   = nrows;
1859     buf_si_i[0] = 0;
1860     nrows       = 0;
1861     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1862       nzi = coi[i+1] - coi[i];
1863       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
1864       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
1865       nrows++;
1866     }
1867     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
1868     k++;
1869     buf_si += len_si[proc];
1870   }
1871   for (i=0; i<nrecv; i++) {
1872     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1873   }
1874   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1875   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1876 
1877   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
1878   ierr = PetscFree(len_ri);CHKERRQ(ierr);
1879   ierr = PetscFree(swaits);CHKERRQ(ierr);
1880   ierr = PetscFree(buf_s);CHKERRQ(ierr);
1881 
1882   /* (5) compute the local portion of Cmpi      */
1883   /* ------------------------------------------ */
1884   /* set initial free space to be Crmax, sufficient for holding nonzeros in each row of Cmpi */
1885   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
1886   current_space = free_space;
1887 
1888   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
1889   for (k=0; k<nrecv; k++) {
1890     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1891     nrows       = *buf_ri_k[k];
1892     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
1893     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
1894   }
1895 
1896   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
1897   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
1898   for (i=0; i<pn; i++) {
1899     /* add C_loc into Cmpi */
1900     nzi  = c_loc->i[i+1] - c_loc->i[i];
1901     Jptr = c_loc->j + c_loc->i[i];
1902     ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1903 
1904     /* add received col data into lnk */
1905     for (k=0; k<nrecv; k++) { /* k-th received message */
1906       if (i == *nextrow[k]) { /* i-th row */
1907         nzi  = *(nextci[k]+1) - *nextci[k];
1908         Jptr = buf_rj[k] + *nextci[k];
1909         ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1910         nextrow[k]++; nextci[k]++;
1911       }
1912     }
1913     nzi = lnk[0];
1914 
1915     /* copy data into free space, then initialize lnk */
1916     ierr = PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1917     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
1918   }
1919   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
1920   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1921   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
1922 
1923   /* local sizes and preallocation */
1924   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1925   if (P->cmap->bs > 0) {
1926     ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
1927     ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
1928   }
1929   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1930   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1931 
1932   /* free communication buffers */
1933   ierr = PetscFree(id_r);CHKERRQ(ierr);
1934   ierr = PetscFree(len_r);CHKERRQ(ierr);
1935   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
1936   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
1937   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
1938   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
1939   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
1940 
1941   /* attach the supporting struct to Cmpi for reuse */
1942   c = (Mat_MPIAIJ*)Cmpi->data;
1943   c->ap           = ptap;
1944   ptap->duplicate = Cmpi->ops->duplicate;
1945   ptap->destroy   = Cmpi->ops->destroy;
1946   ptap->view      = Cmpi->ops->view;
1947   ierr = PetscCalloc1(pN,&ptap->apa);CHKERRQ(ierr);
1948 
1949   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1950   Cmpi->assembled        = PETSC_FALSE;
1951   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1952   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1953   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1954   *C                     = Cmpi;
1955   PetscFunctionReturn(0);
1956 }
1957 
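/* MatPtAPNumeric_MPIAIJ_MPIAIJ - numeric phase of the nonscalable algorithm: AP_loc = A_loc*P
   is recomputed row by row through a dense accumulator, C_loc = Rd*AP_loc and
   C_oth = Ro*AP_loc reuse the stored symbolic patterns, and C is assembled from the two
   pieces with MatSetValues() and final assembly */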
1958 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
1959 {
1960   PetscErrorCode    ierr;
1961   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
1962   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
1963   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
1964   Mat_APMPI         *ptap = c->ap;
1965   Mat               AP_loc,C_loc,C_oth;
1966   PetscInt          i,rstart,rend,cm,ncols,row;
1967   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
1968   PetscScalar       *apa;
1969   const PetscInt    *cols;
1970   const PetscScalar *vals;
1971 
1972   PetscFunctionBegin;
1973   if (!ptap->AP_loc) {
1974     MPI_Comm comm;
1975     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
1976     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
1977   }
1978 
1979   ierr = MatZeroEntries(C);CHKERRQ(ierr);
1980   /* 1) get R = Pd^T,Ro = Po^T */
1981   if (ptap->reuse == MAT_REUSE_MATRIX) {
1982     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1983     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1984   }
1985 
1986   /* 2) get AP_loc */
1987   AP_loc = ptap->AP_loc;
1988   ap = (Mat_SeqAIJ*)AP_loc->data;
1989 
1990   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1991   /*-----------------------------------------------------*/
1992   if (ptap->reuse == MAT_REUSE_MATRIX) {
1993     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
1994     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1995     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1996   }
1997 
1998   /* 2-2) compute numeric A_loc*P - dominating part */
1999   /* ---------------------------------------------- */
2000   /* get data from symbolic products */
2001   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
2002   if (ptap->P_oth) {
2003     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
2004   }
2005   apa   = ptap->apa;
2006   api   = ap->i;
2007   apj   = ap->j;
2008   for (i=0; i<am; i++) {
2009     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
2010     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
2011     apnz = api[i+1] - api[i];
2012     for (j=0; j<apnz; j++) {
2013       col = apj[j+api[i]];
2014       ap->a[j+ap->i[i]] = apa[col];
2015       apa[col] = 0.0;
2016     }
2017     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
2018   }
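  /* apa[] is a dense work array of length pN: AProw_nonscalable() scatters row i of A*P into
     it, and the loop above gathers the values back into the sparse storage of AP_loc while
     zeroing each used slot so apa is clean for the next row */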
2019 
2020   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
2021   ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
2022   ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
2023   C_loc = ptap->C_loc;
2024   C_oth = ptap->C_oth;
2025 
2026   /* add C_loc and C_oth to C */
2027   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
2028 
2029   /* C_loc -> C */
2030   cm    = C_loc->rmap->N;
2031   c_seq = (Mat_SeqAIJ*)C_loc->data;
2032   cols = c_seq->j;
2033   vals = c_seq->a;
2034 
2035 
2036   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat() can only be used when C->was_assembled */
2037   /* is PETSC_FALSE and there are no off-process parts. Once was_assembled is PETSC_TRUE, the */
2038   /* statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in it no longer holds, and the */
2039   /* slower, more general MatSetValues_MPIAIJ(), which looks the column index up in a table, */
2040   /* has to be used instead. */
2041   if (C->assembled) {
2042     C->was_assembled = PETSC_TRUE;
2043     C->assembled     = PETSC_FALSE;
2044   }
2045   if (C->was_assembled) {
2046     for (i=0; i<cm; i++) {
2047       ncols = c_seq->i[i+1] - c_seq->i[i];
2048       row = rstart + i;
2049       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2050       cols += ncols; vals += ncols;
2051     }
2052   } else {
2053     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
2054   }
2055 
2056   /* Co -> C, off-processor part */
2057   cm = C_oth->rmap->N;
2058   c_seq = (Mat_SeqAIJ*)C_oth->data;
2059   cols = c_seq->j;
2060   vals = c_seq->a;
2061   for (i=0; i<cm; i++) {
2062     ncols = c_seq->i[i+1] - c_seq->i[i];
2063     row = p->garray[i];
2064     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2065     cols += ncols; vals += ncols;
2066   }
2067 
2068   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2069   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2070 
2071   ptap->reuse = MAT_REUSE_MATRIX;
2072 
2073   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated by A and P */
2074   if (ptap->freestruct) {
2075     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
2076   }
2077   PetscFunctionReturn(0);
2078 }
2079