xref: /petsc/src/mat/impls/aij/mpi/mpiptap.c (revision 1873fc9fedfc3bd8fc2f82ccd29ab8b44a80f1db)
1 
2 /*
3   Defines projective product routines where A is a MPIAIJ matrix
4           C = P^T * A * P
5 */
6 
7 #include <../src/mat/impls/aij/seq/aij.h>   /*I "petscmat.h" I*/
8 #include <../src/mat/utils/freespace.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <petscbt.h>
11 #include <petsctime.h>
12 #include <petsc/private/hashmapiv.h>
13 #include <petsc/private/hashseti.h>
14 #include <petscsf.h>
15 
16 
17 PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
18 {
19   PetscErrorCode    ierr;
20   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
21   Mat_APMPI         *ptap=a->ap;
22   PetscBool         iascii;
23   PetscViewerFormat format;
24 
25   PetscFunctionBegin;
26   if (!ptap) {
27     /* hack: MatDuplicate() sets oldmat->ops->view to newmat which is a base mat class with null ptpa! */
28     A->ops->view = MatView_MPIAIJ;
29     ierr = (A->ops->view)(A,viewer);CHKERRQ(ierr);
30     PetscFunctionReturn(0);
31   }
32 
33   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
34   if (iascii) {
35     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
36     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
37       if (ptap->algType == 0) {
38         ierr = PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");CHKERRQ(ierr);
39       } else if (ptap->algType == 1) {
40         ierr = PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");CHKERRQ(ierr);
41       } else if (ptap->algType == 2) {
42         ierr = PetscViewerASCIIPrintf(viewer,"using allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
43       } else if (ptap->algType == 3) {
44         ierr = PetscViewerASCIIPrintf(viewer,"using merged allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
45       }
46     }
47   }
48   ierr = (ptap->view)(A,viewer);CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
51 
52 PetscErrorCode MatFreeIntermediateDataStructures_MPIAIJ_AP(Mat A)
53 {
54   PetscErrorCode      ierr;
55   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data;
56   Mat_APMPI           *ptap=a->ap;
57   Mat_Merge_SeqsToMPI *merge;
58 
59   PetscFunctionBegin;
60   if (!ptap) PetscFunctionReturn(0);
61 
62   ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
63   ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
64   ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
65   ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
66   ierr = MatDestroy(&ptap->A_loc);CHKERRQ(ierr); /* used by MatTransposeMatMult() */
67   ierr = MatDestroy(&ptap->Rd);CHKERRQ(ierr);
68   ierr = MatDestroy(&ptap->Ro);CHKERRQ(ierr);
69   if (ptap->AP_loc) { /* used by alg_rap */
70     Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
71     ierr = PetscFree(ap->i);CHKERRQ(ierr);
72     ierr = PetscFree2(ap->j,ap->a);CHKERRQ(ierr);
73     ierr = MatDestroy(&ptap->AP_loc);CHKERRQ(ierr);
74   } else { /* used by alg_ptap */
75     ierr = PetscFree(ptap->api);CHKERRQ(ierr);
76     ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
77   }
78   ierr = MatDestroy(&ptap->C_loc);CHKERRQ(ierr);
79   ierr = MatDestroy(&ptap->C_oth);CHKERRQ(ierr);
80   if (ptap->apa) {ierr = PetscFree(ptap->apa);CHKERRQ(ierr);}
81 
82   ierr = MatDestroy(&ptap->Pt);CHKERRQ(ierr);
83 
84   merge=ptap->merge;
85   if (merge) { /* used by alg_ptap */
86     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
87     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
88     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
89     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
90     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
91     ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
92     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
93     ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
94     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
95     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
96     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
97     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
98     ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
99     ierr = PetscFree(ptap->merge);CHKERRQ(ierr);
100   }
101   ierr = ISLocalToGlobalMappingDestroy(&ptap->ltog);CHKERRQ(ierr);
102 
103   ierr = PetscSFDestroy(&ptap->sf);CHKERRQ(ierr);
104   ierr = PetscFree(ptap->c_othi);CHKERRQ(ierr);
105   ierr = PetscFree(ptap->c_rmti);CHKERRQ(ierr);
106   PetscFunctionReturn(0);
107 }
108 
109 PetscErrorCode MatDestroy_MPIAIJ_PtAP(Mat A)
110 {
111   PetscErrorCode ierr;
112   Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
113   Mat_APMPI      *ptap=a->ap;
114 
115   PetscFunctionBegin;
116   ierr = (*A->ops->freeintermediatedatastructures)(A);CHKERRQ(ierr);
117   ierr = (*ptap->destroy)(A);CHKERRQ(ierr); /* MatDestroy_MPIAIJ(A) */
118   ierr = PetscFree(ptap);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 PETSC_INTERN PetscErrorCode MatPtAP_MPIAIJ_MPIAIJ(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
123 {
124   PetscErrorCode ierr;
125   PetscBool      flg;
126   MPI_Comm       comm;
127 #if !defined(PETSC_HAVE_HYPRE)
128   const char          *algTypes[4] = {"scalable","nonscalable","allatonce","allatonce_merged"};
129   PetscInt            nalg=4;
130 #else
131   const char          *algTypes[5] = {"scalable","nonscalable","allatonce","allatonce_merged","hypre"};
132   PetscInt            nalg=5;
133 #endif
134   PetscInt            pN=P->cmap->N,alg=1; /* set default algorithm */
135 
136   PetscFunctionBegin;
137   /* check if matrix local sizes are compatible */
138   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
139   if (A->rmap->rstart != P->rmap->rstart || A->rmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Arow (%D, %D) != Prow (%D,%D)",A->rmap->rstart,A->rmap->rend,P->rmap->rstart,P->rmap->rend);
140   if (A->cmap->rstart != P->rmap->rstart || A->cmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Acol (%D, %D) != Prow (%D,%D)",A->cmap->rstart,A->cmap->rend,P->rmap->rstart,P->rmap->rend);
141 
142   if (scall == MAT_INITIAL_MATRIX) {
143     /* pick an algorithm */
144     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
145     ierr = PetscOptionsEList("-matptap_via","Algorithmic approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
146     ierr = PetscOptionsEnd();CHKERRQ(ierr);
147 
148     if (!flg && pN > 100000) { /* may switch to scalable algorithm as default */
149       MatInfo     Ainfo,Pinfo;
150       PetscInt    nz_local;
151       PetscBool   alg_scalable_loc=PETSC_FALSE,alg_scalable;
152 
153       ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
154       ierr = MatGetInfo(P,MAT_LOCAL,&Pinfo);CHKERRQ(ierr);
155       nz_local = (PetscInt)(Ainfo.nz_allocated + Pinfo.nz_allocated);
156 
157       if (pN > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
158       ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);
159 
160       if (alg_scalable) {
161         alg = 0; /* scalable algorithm would 50% slower than nonscalable algorithm */
162       }
163     }
164 
165     switch (alg) {
166     case 1:
167       /* do R=P^T locally, then C=R*A*P -- nonscalable */
168       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
169       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);CHKERRQ(ierr);
170       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
171       break;
172     case 2:
173       /* compute C=P^T*A*P allatonce */
174       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
175       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(A,P,fill,C);CHKERRQ(ierr);
176       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
177       break;
178     case 3:
179       /* compute C=P^T*A*P allatonce */
180       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
181       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(A,P,fill,C);CHKERRQ(ierr);
182       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
183       break;
184 #if defined(PETSC_HAVE_HYPRE)
185     case 4:
186       /* Use boomerAMGBuildCoarseOperator */
187       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
188       ierr = MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);CHKERRQ(ierr);
189       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
190       break;
191 #endif
192     default:
193       /* do R=P^T locally, then C=R*A*P */
194       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
195       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);CHKERRQ(ierr);
196       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
197       break;
198     }
199 
200     if (alg == 0 || alg == 1 || alg == 2 || alg == 3) {
201       Mat_MPIAIJ *c  = (Mat_MPIAIJ*)(*C)->data;
202       Mat_APMPI  *ap = c->ap;
203       ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)(*C)),((PetscObject)(*C))->prefix,"MatFreeIntermediateDataStructures","Mat");CHKERRQ(ierr);
204       ap->freestruct = PETSC_FALSE;
205       ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures", "MatFreeIntermediateDataStructures",ap->freestruct,&ap->freestruct, NULL);CHKERRQ(ierr);
206       ierr = PetscOptionsEnd();CHKERRQ(ierr);
207     }
208   }
209 
210   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
211   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
212   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
213   PetscFunctionReturn(0);
214 }
215 
216 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
217 {
218   PetscErrorCode    ierr;
219   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
220   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
221   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
222   Mat_APMPI         *ptap = c->ap;
223   Mat               AP_loc,C_loc,C_oth;
224   PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz,nout;
225   PetscScalar       *apa;
226   const PetscInt    *cols;
227   const PetscScalar *vals;
228 
229   PetscFunctionBegin;
230   if (!ptap->AP_loc) {
231     MPI_Comm comm;
232     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
233     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
234   }
235 
236   ierr = MatZeroEntries(C);CHKERRQ(ierr);
237 
238   /* 1) get R = Pd^T,Ro = Po^T */
239   if (ptap->reuse == MAT_REUSE_MATRIX) {
240     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
241     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
242   }
243 
244   /* 2) get AP_loc */
245   AP_loc = ptap->AP_loc;
246   ap = (Mat_SeqAIJ*)AP_loc->data;
247 
248   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
249   /*-----------------------------------------------------*/
250   if (ptap->reuse == MAT_REUSE_MATRIX) {
251     /* P_oth and P_loc are obtained in MatPtASymbolic() when reuse == MAT_INITIAL_MATRIX */
252     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
253     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
254   }
255 
256   /* 2-2) compute numeric A_loc*P - dominating part */
257   /* ---------------------------------------------- */
258   /* get data from symbolic products */
259   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
260   if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
261 
262   api   = ap->i;
263   apj   = ap->j;
264   ierr = ISLocalToGlobalMappingApply(ptap->ltog,api[AP_loc->rmap->n],apj,apj);CHKERRQ(ierr);
265   for (i=0; i<am; i++) {
266     /* AP[i,:] = A[i,:]*P = Ad*P_loc Ao*P_oth */
267     apnz = api[i+1] - api[i];
268     apa = ap->a + api[i];
269     ierr = PetscArrayzero(apa,apnz);CHKERRQ(ierr);
270     AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
271   }
272   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,api[AP_loc->rmap->n],apj,&nout,apj);CHKERRQ(ierr);
273   if (api[AP_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",api[AP_loc->rmap->n],nout);
274 
275   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
276   /* Always use scalable version since we are in the MPI scalable version */
277   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
278   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
279 
280   C_loc = ptap->C_loc;
281   C_oth = ptap->C_oth;
282 
283   /* add C_loc and Co to to C */
284   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
285 
286   /* C_loc -> C */
287   cm    = C_loc->rmap->N;
288   c_seq = (Mat_SeqAIJ*)C_loc->data;
289   cols = c_seq->j;
290   vals = c_seq->a;
291   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_loc->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
292 
293   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
294   /* when there are no off-processor parts.  */
295   /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
296   /* is no longer true. Then the more complex function MatSetValues_MPIAIJ() has to be used, where the column index is looked up from */
297   /* a table, and other, more complex stuff has to be done. */
298   if (C->assembled) {
299     C->was_assembled = PETSC_TRUE;
300     C->assembled     = PETSC_FALSE;
301   }
302   if (C->was_assembled) {
303     for (i=0; i<cm; i++) {
304       ncols = c_seq->i[i+1] - c_seq->i[i];
305       row = rstart + i;
306       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
307       cols += ncols; vals += ncols;
308     }
309   } else {
310     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
311   }
312   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_loc->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
313   if (c_seq->i[C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);
314 
315   /* Co -> C, off-processor part */
316   cm = C_oth->rmap->N;
317   c_seq = (Mat_SeqAIJ*)C_oth->data;
318   cols = c_seq->j;
319   vals = c_seq->a;
320   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_oth->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
321   for (i=0; i<cm; i++) {
322     ncols = c_seq->i[i+1] - c_seq->i[i];
323     row = p->garray[i];
324     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
325     cols += ncols; vals += ncols;
326   }
327   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
328   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
329 
330   ptap->reuse = MAT_REUSE_MATRIX;
331 
332   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_oth->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
333   if (c_seq->i[C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);
334 
335   /* supporting struct ptap consumes almost same amount of memory as C=PtAP, release it if C will not be updated by A and P */
336   if (ptap->freestruct) {
337     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
338   }
339   PetscFunctionReturn(0);
340 }
341 
342 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat *C)
343 {
344   PetscErrorCode      ierr;
345   Mat_APMPI           *ptap;
346   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
347   MPI_Comm            comm;
348   PetscMPIInt         size,rank;
349   Mat                 Cmpi,P_loc,P_oth;
350   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
351   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
352   PetscInt            *lnk,i,k,pnz,row,nsend;
353   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
354   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
355   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
356   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
357   MPI_Request         *swaits,*rwaits;
358   MPI_Status          *sstatus,rstatus;
359   PetscLayout         rowmap;
360   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
361   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
362   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi,nout;
363   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
364   PetscScalar         *apv;
365   PetscTable          ta;
366   MatType             mtype;
367   const char          *prefix;
368 #if defined(PETSC_USE_INFO)
369   PetscReal           apfill;
370 #endif
371 
372   PetscFunctionBegin;
373   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
374   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
375   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
376 
377   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
378 
379   /* create symbolic parallel matrix Cmpi */
380   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
381   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
382   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
383 
384   /* create struct Mat_APMPI and attached it to C later */
385   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
386   ptap->reuse = MAT_INITIAL_MATRIX;
387   ptap->algType = 0;
388 
389   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
390   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);CHKERRQ(ierr);
391   /* get P_loc by taking all local rows of P */
392   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);CHKERRQ(ierr);
393 
394   ptap->P_loc = P_loc;
395   ptap->P_oth = P_oth;
396 
397   /* (0) compute Rd = Pd^T, Ro = Po^T  */
398   /* --------------------------------- */
399   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
400   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
401 
402   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
403   /* ----------------------------------------------------------------- */
404   p_loc  = (Mat_SeqAIJ*)P_loc->data;
405   if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;
406 
407   /* create and initialize a linked list */
408   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for compute AP_loc and Cmpi */
409   MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
410   MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
411   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
412 
413   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
414 
415   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
416   if (ao) {
417     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
418   } else {
419     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
420   }
421   current_space = free_space;
422   nspacedouble  = 0;
423 
424   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
425   api[0] = 0;
426   for (i=0; i<am; i++) {
427     /* diagonal portion: Ad[i,:]*P */
428     ai = ad->i; pi = p_loc->i;
429     nzi = ai[i+1] - ai[i];
430     aj  = ad->j + ai[i];
431     for (j=0; j<nzi; j++) {
432       row  = aj[j];
433       pnz  = pi[row+1] - pi[row];
434       Jptr = p_loc->j + pi[row];
435       /* add non-zero cols of P into the sorted linked list lnk */
436       ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
437     }
438     /* off-diagonal portion: Ao[i,:]*P */
439     if (ao) {
440       ai = ao->i; pi = p_oth->i;
441       nzi = ai[i+1] - ai[i];
442       aj  = ao->j + ai[i];
443       for (j=0; j<nzi; j++) {
444         row  = aj[j];
445         pnz  = pi[row+1] - pi[row];
446         Jptr = p_oth->j + pi[row];
447         ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
448       }
449     }
450     apnz     = lnk[0];
451     api[i+1] = api[i] + apnz;
452 
453     /* if free space is not available, double the total space in the list */
454     if (current_space->local_remaining<apnz) {
455       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
456       nspacedouble++;
457     }
458 
459     /* Copy data into free space, then initialize lnk */
460     ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);
461 
462     current_space->array           += apnz;
463     current_space->local_used      += apnz;
464     current_space->local_remaining -= apnz;
465   }
466   /* Allocate space for apj and apv, initialize apj, and */
467   /* destroy list of free space and other temporary array(s) */
468   ierr = PetscCalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
469   ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
470   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
471 
472   /* Create AP_loc for reuse */
473   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
474   ierr = MatSeqAIJCompactOutExtraColumns_SeqAIJ(ptap->AP_loc, &ptap->ltog);CHKERRQ(ierr);
475 
476 #if defined(PETSC_USE_INFO)
477   if (ao) {
478     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
479   } else {
480     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
481   }
482   ptap->AP_loc->info.mallocs           = nspacedouble;
483   ptap->AP_loc->info.fill_ratio_given  = fill;
484   ptap->AP_loc->info.fill_ratio_needed = apfill;
485 
486   if (api[am]) {
487     ierr = PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
488     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,B,MatReuse,%g,&C) for best AP_loc performance.;\n",(double)apfill);CHKERRQ(ierr);
489   } else {
490     ierr = PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty \n");CHKERRQ(ierr);
491   }
492 #endif
493 
494   /* (2-1) compute symbolic Co = Ro*AP_loc  */
495   /* ------------------------------------ */
496   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
497   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
498   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
499   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
500 
501   /* (3) send coj of C_oth to other processors  */
502   /* ------------------------------------------ */
503   /* determine row ownership */
504   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
505   rowmap->n  = pn;
506   rowmap->bs = 1;
507   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
508   owners = rowmap->range;
509 
510   /* determine the number of messages to send, their lengths */
511   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
512   ierr = PetscArrayzero(len_s,size);CHKERRQ(ierr);
513   ierr = PetscArrayzero(len_si,size);CHKERRQ(ierr);
514 
515   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
516   coi   = c_oth->i; coj = c_oth->j;
517   con   = ptap->C_oth->rmap->n;
518   proc  = 0;
519   ierr = ISLocalToGlobalMappingApply(ptap->ltog,coi[con],coj,coj);CHKERRQ(ierr);
520   for (i=0; i<con; i++) {
521     while (prmap[i] >= owners[proc+1]) proc++;
522     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
523     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
524   }
525 
526   len          = 0; /* max length of buf_si[], see (4) */
527   owners_co[0] = 0;
528   nsend        = 0;
529   for (proc=0; proc<size; proc++) {
530     owners_co[proc+1] = owners_co[proc] + len_si[proc];
531     if (len_s[proc]) {
532       nsend++;
533       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
534       len         += len_si[proc];
535     }
536   }
537 
538   /* determine the number and length of messages to receive for coi and coj  */
539   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
540   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
541 
542   /* post the Irecv and Isend of coj */
543   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
544   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
545   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
546   for (proc=0, k=0; proc<size; proc++) {
547     if (!len_s[proc]) continue;
548     i    = owners_co[proc];
549     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
550     k++;
551   }
552 
553   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
554   /* ---------------------------------------- */
555   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
556   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
557   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
558   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
559   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,c_loc->j);CHKERRQ(ierr);
560 
561   /* receives coj are complete */
562   for (i=0; i<nrecv; i++) {
563     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
564   }
565   ierr = PetscFree(rwaits);CHKERRQ(ierr);
566   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
567 
568   /* add received column indices into ta to update Crmax */
569   for (k=0; k<nrecv; k++) {/* k-th received message */
570     Jptr = buf_rj[k];
571     for (j=0; j<len_r[k]; j++) {
572       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
573     }
574   }
575   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
576   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
577 
578   /* (4) send and recv coi */
579   /*-----------------------*/
580   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
581   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
582   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
583   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
584   for (proc=0,k=0; proc<size; proc++) {
585     if (!len_s[proc]) continue;
586     /* form outgoing message for i-structure:
587          buf_si[0]:                 nrows to be sent
588                [1:nrows]:           row index (global)
589                [nrows+1:2*nrows+1]: i-structure index
590     */
591     /*-------------------------------------------*/
592     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
593     buf_si_i    = buf_si + nrows+1;
594     buf_si[0]   = nrows;
595     buf_si_i[0] = 0;
596     nrows       = 0;
597     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
598       nzi = coi[i+1] - coi[i];
599       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
600       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
601       nrows++;
602     }
603     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
604     k++;
605     buf_si += len_si[proc];
606   }
607   for (i=0; i<nrecv; i++) {
608     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
609   }
610   ierr = PetscFree(rwaits);CHKERRQ(ierr);
611   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
612 
613   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
614   ierr = PetscFree(len_ri);CHKERRQ(ierr);
615   ierr = PetscFree(swaits);CHKERRQ(ierr);
616   ierr = PetscFree(buf_s);CHKERRQ(ierr);
617 
618   /* (5) compute the local portion of Cmpi      */
619   /* ------------------------------------------ */
620   /* set initial free space to be Crmax, sufficient for holding nozeros in each row of Cmpi */
621   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
622   current_space = free_space;
623 
624   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
625   for (k=0; k<nrecv; k++) {
626     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
627     nrows       = *buf_ri_k[k];
628     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
629     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* poins to the next i-structure of k-th recved i-structure  */
630   }
631 
632   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
633   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
634   for (i=0; i<pn; i++) {
635     /* add C_loc into Cmpi */
636     nzi  = c_loc->i[i+1] - c_loc->i[i];
637     Jptr = c_loc->j + c_loc->i[i];
638     ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
639 
640     /* add received col data into lnk */
641     for (k=0; k<nrecv; k++) { /* k-th received message */
642       if (i == *nextrow[k]) { /* i-th row */
643         nzi  = *(nextci[k]+1) - *nextci[k];
644         Jptr = buf_rj[k] + *nextci[k];
645         ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
646         nextrow[k]++; nextci[k]++;
647       }
648     }
649     nzi = lnk[0];
650 
651     /* copy data into free space, then initialize lnk */
652     ierr = PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);CHKERRQ(ierr);
653     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
654   }
655   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
656   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
657   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
658 
659   /* local sizes and preallocation */
660   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
661   if (P->cmap->bs > 0) {
662     ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
663     ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
664   }
665   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
666   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
667 
668   /* members in merge */
669   ierr = PetscFree(id_r);CHKERRQ(ierr);
670   ierr = PetscFree(len_r);CHKERRQ(ierr);
671   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
672   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
673   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
674   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
675   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
676 
677   /* attach the supporting struct to Cmpi for reuse */
678   c = (Mat_MPIAIJ*)Cmpi->data;
679   c->ap           = ptap;
680   ptap->duplicate = Cmpi->ops->duplicate;
681   ptap->destroy   = Cmpi->ops->destroy;
682   ptap->view      = Cmpi->ops->view;
683 
684   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
685   Cmpi->assembled        = PETSC_FALSE;
686   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
687   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
688   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
689   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
690   *C                     = Cmpi;
691 
692    nout = 0;
693    ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_oth->i[ptap->C_oth->rmap->n],c_oth->j,&nout,c_oth->j);CHKERRQ(ierr);
694    if (c_oth->i[ptap->C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_oth->i[ptap->C_oth->rmap->n],nout);
695    ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,&nout,c_loc->j);CHKERRQ(ierr);
696    if (c_loc->i[ptap->C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_loc->i[ptap->C_loc->rmap->n],nout);
697 
698   PetscFunctionReturn(0);
699 }
700 
701 PETSC_STATIC_INLINE PetscErrorCode MatPtAPSymbolicComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHSetI dht,PetscHSetI oht)
702 {
703   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
704   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
705   PetscInt             *ai,nzi,j,*aj,row,col,*pi,*pj,pnz,nzpi,*p_othcols,k;
706   PetscInt             pcstart,pcend,column,offset;
707   PetscErrorCode       ierr;
708 
709   PetscFunctionBegin;
710   pcstart = P->cmap->rstart;
711   pcstart *= dof;
712   pcend   = P->cmap->rend;
713   pcend   *= dof;
714   /* diagonal portion: Ad[i,:]*P */
715   ai = ad->i;
716   nzi = ai[i+1] - ai[i];
717   aj  = ad->j + ai[i];
718   for (j=0; j<nzi; j++) {
719     row  = aj[j];
720     offset = row%dof;
721     row   /= dof;
722     nzpi = pd->i[row+1] - pd->i[row];
723     pj  = pd->j + pd->i[row];
724     for (k=0; k<nzpi; k++) {
725       ierr = PetscHSetIAdd(dht,pj[k]*dof+offset+pcstart);CHKERRQ(ierr);
726     }
727   }
728   /* off diag P */
729   for (j=0; j<nzi; j++) {
730     row  = aj[j];
731     offset = row%dof;
732     row   /= dof;
733     nzpi = po->i[row+1] - po->i[row];
734     pj  = po->j + po->i[row];
735     for (k=0; k<nzpi; k++) {
736       ierr = PetscHSetIAdd(oht,p->garray[pj[k]]*dof+offset);CHKERRQ(ierr);
737     }
738   }
739 
740   /* off diagonal part: Ao[i, :]*P_oth */
741   if (ao) {
742     ai = ao->i;
743     pi = p_oth->i;
744     nzi = ai[i+1] - ai[i];
745     aj  = ao->j + ai[i];
746     for (j=0; j<nzi; j++) {
747       row  = aj[j];
748       offset = a->garray[row]%dof;
749       row  = map[row];
750       pnz  = pi[row+1] - pi[row];
751       p_othcols = p_oth->j + pi[row];
752       for (col=0; col<pnz; col++) {
753         column = p_othcols[col] * dof + offset;
754         if (column>=pcstart && column<pcend) {
755           ierr = PetscHSetIAdd(dht,column);CHKERRQ(ierr);
756         } else {
757           ierr = PetscHSetIAdd(oht,column);CHKERRQ(ierr);
758         }
759       }
760     }
761   } /* end if (ao) */
762   PetscFunctionReturn(0);
763 }
764 
765 PETSC_STATIC_INLINE PetscErrorCode MatPtAPNumericComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHMapIV hmap)
766 {
767   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
768   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
769   PetscInt             *ai,nzi,j,*aj,row,col,*pi,pnz,*p_othcols,pcstart,*pj,k,nzpi,offset;
770   PetscScalar          ra,*aa,*pa;
771   PetscErrorCode       ierr;
772 
773   PetscFunctionBegin;
774   pcstart = P->cmap->rstart;
775   pcstart *= dof;
776 
777   /* diagonal portion: Ad[i,:]*P */
778   ai  = ad->i;
779   nzi = ai[i+1] - ai[i];
780   aj  = ad->j + ai[i];
781   aa  = ad->a + ai[i];
782   for (j=0; j<nzi; j++) {
783     ra   = aa[j];
784     row  = aj[j];
785     offset = row%dof;
786     row    /= dof;
787     nzpi = pd->i[row+1] - pd->i[row];
788     pj = pd->j + pd->i[row];
789     pa = pd->a + pd->i[row];
790     for (k=0; k<nzpi; k++) {
791       ierr = PetscHMapIVAddValue(hmap,pj[k]*dof+offset+pcstart,ra*pa[k]);CHKERRQ(ierr);
792     }
793     ierr = PetscLogFlops(2.0*nzpi);CHKERRQ(ierr);
794   }
795   for (j=0; j<nzi; j++) {
796     ra   = aa[j];
797     row  = aj[j];
798     offset = row%dof;
799     row   /= dof;
800     nzpi = po->i[row+1] - po->i[row];
801     pj = po->j + po->i[row];
802     pa = po->a + po->i[row];
803     for (k=0; k<nzpi; k++) {
804       ierr = PetscHMapIVAddValue(hmap,p->garray[pj[k]]*dof+offset,ra*pa[k]);CHKERRQ(ierr);
805     }
806     ierr = PetscLogFlops(2.0*nzpi);CHKERRQ(ierr);
807   }
808 
809   /* off diagonal part: Ao[i, :]*P_oth */
810   if (ao) {
811     ai = ao->i;
812     pi = p_oth->i;
813     nzi = ai[i+1] - ai[i];
814     aj  = ao->j + ai[i];
815     aa  = ao->a + ai[i];
816     for (j=0; j<nzi; j++) {
817       row  = aj[j];
818       offset = a->garray[row]%dof;
819       row    = map[row];
820       ra   = aa[j];
821       pnz  = pi[row+1] - pi[row];
822       p_othcols = p_oth->j + pi[row];
823       pa   = p_oth->a + pi[row];
824       for (col=0; col<pnz; col++) {
825         ierr = PetscHMapIVAddValue(hmap,p_othcols[col]*dof+offset,ra*pa[col]);CHKERRQ(ierr);
826       }
827       ierr = PetscLogFlops(2.0*pnz);CHKERRQ(ierr);
828     }
829   } /* end if (ao) */
830 
831   PetscFunctionReturn(0);
832 }
833 
834 PetscErrorCode MatGetBrowsOfAcols_MPIXAIJ(Mat,Mat,PetscInt dof,MatReuse,Mat*);
835 
836 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,Mat C)
837 {
838   PetscErrorCode    ierr;
839   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
840   Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
841   Mat_APMPI         *ptap = c->ap;
842   PetscHMapIV       hmap;
843   PetscInt          i,j,jj,kk,nzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,ccstart,ccend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,*dcc,*occ,loc;
844   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
845   PetscInt          offset,ii,pocol;
846   const PetscInt    *mappingindices;
847   IS                map;
848   MPI_Comm          comm;
849 
850   PetscFunctionBegin;
851   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
852   if (!ptap->P_oth) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
853 
854   ierr = MatZeroEntries(C);CHKERRQ(ierr);
855 
856   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
857   /*-----------------------------------------------------*/
858   if (ptap->reuse == MAT_REUSE_MATRIX) {
859     /* P_oth and P_loc are obtained in MatPtASymbolic() when reuse == MAT_INITIAL_MATRIX */
860     ierr =  MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
861   }
862   ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);
863 
864   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
865   pon *= dof;
866   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
867   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
868   cmaxr = 0;
869   for (i=0; i<pon; i++) {
870     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
871   }
872   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
873   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
874   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
875   ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
876   for (i=0; i<am && pon; i++) {
877     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
878     offset = i%dof;
879     ii     = i/dof;
880     nzi = po->i[ii+1] - po->i[ii];
881     if (!nzi) continue;
882     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);CHKERRQ(ierr);
883     voff = 0;
884     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
885     if (!voff) continue;
886 
887     /* Form C(ii, :) */
888     poj = po->j + po->i[ii];
889     poa = po->a + po->i[ii];
890     for (j=0; j<nzi; j++) {
891       pocol = poj[j]*dof+offset;
892       c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
893       c_rmtaa = c_rmta + ptap->c_rmti[pocol];
894       for (jj=0; jj<voff; jj++) {
895         apvaluestmp[jj] = apvalues[jj]*poa[j];
896         /*If the row is empty */
897         if (!c_rmtc[pocol]) {
898           c_rmtjj[jj] = apindices[jj];
899           c_rmtaa[jj] = apvaluestmp[jj];
900           c_rmtc[pocol]++;
901         } else {
902           ierr = PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);CHKERRQ(ierr);
903           if (loc>=0){ /* hit */
904             c_rmtaa[loc] += apvaluestmp[jj];
905             ierr = PetscLogFlops(1.0);CHKERRQ(ierr);
906           } else { /* new element */
907             loc = -(loc+1);
908             /* Move data backward */
909             for (kk=c_rmtc[pocol]; kk>loc; kk--) {
910               c_rmtjj[kk] = c_rmtjj[kk-1];
911               c_rmtaa[kk] = c_rmtaa[kk-1];
912             }/* End kk */
913             c_rmtjj[loc] = apindices[jj];
914             c_rmtaa[loc] = apvaluestmp[jj];
915             c_rmtc[pocol]++;
916           }
917         }
918         ierr = PetscLogFlops(voff);CHKERRQ(ierr);
919       } /* End jj */
920     } /* End j */
921   } /* End i */
922 
923   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
924 
925   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
926   pn *= dof;
927   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
928 
929   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
930   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
931   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
932   pcstart = pcstart*dof;
933   pcend   = pcend*dof;
934   cd = (Mat_SeqAIJ*)(c->A)->data;
935   co = (Mat_SeqAIJ*)(c->B)->data;
936 
937   cmaxr = 0;
938   for (i=0; i<pn; i++) {
939     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
940   }
941   ierr = PetscCalloc5(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pn,&dcc,pn,&occ);CHKERRQ(ierr);
942   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
943   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
944   for (i=0; i<am && pn; i++) {
945     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
946     offset = i%dof;
947     ii     = i/dof;
948     nzi = pd->i[ii+1] - pd->i[ii];
949     if (!nzi) continue;
950     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);CHKERRQ(ierr);
951     voff = 0;
952     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
953     if (!voff) continue;
954     /* Form C(ii, :) */
955     pdj = pd->j + pd->i[ii];
956     pda = pd->a + pd->i[ii];
957     for (j=0; j<nzi; j++) {
958       row = pcstart + pdj[j] * dof + offset;
959       for (jj=0; jj<voff; jj++) {
960         apvaluestmp[jj] = apvalues[jj]*pda[j];
961       }
962       ierr = PetscLogFlops(voff);CHKERRQ(ierr);
963       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
964     }
965   }
966   ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
967   ierr = MatGetOwnershipRangeColumn(C,&ccstart,&ccend);CHKERRQ(ierr);
968   ierr = PetscFree5(apindices,apvalues,apvaluestmp,dcc,occ);CHKERRQ(ierr);
969   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
970   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
971   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
972   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
973 
974   /* Add contributions from remote */
975   for (i = 0; i < pn; i++) {
976     row = i + pcstart;
977     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
978   }
979   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
980 
981   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
982   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
983 
984   ptap->reuse = MAT_REUSE_MATRIX;
985 
986   /* supporting struct ptap consumes almost same amount of memory as C=PtAP, release it if C will not be updated by A and P */
987   if (ptap->freestruct) {
988     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
989   }
990   PetscFunctionReturn(0);
991 }
992 
993 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,Mat C)
994 {
995   PetscErrorCode      ierr;
996 
997   PetscFunctionBegin;
998 
999   ierr = MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(A,P,1,C);CHKERRQ(ierr);
1000   PetscFunctionReturn(0);
1001 }
1002 
1003 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,Mat C)
1004 {
1005   PetscErrorCode    ierr;
1006   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
1007   Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
1008   Mat_APMPI         *ptap = c->ap;
1009   PetscHMapIV       hmap;
1010   PetscInt          i,j,jj,kk,nzi,dnzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,loc;
1011   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
1012   PetscInt          offset,ii,pocol;
1013   const PetscInt    *mappingindices;
1014   IS                map;
1015   MPI_Comm          comm;
1016 
1017   PetscFunctionBegin;
1018   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1019   if (!ptap->P_oth) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
1020 
1021   ierr = MatZeroEntries(C);CHKERRQ(ierr);
1022 
1023   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1024   /*-----------------------------------------------------*/
1025   if (ptap->reuse == MAT_REUSE_MATRIX) {
1026     /* P_oth and P_loc are obtained in MatPtASymbolic() when reuse == MAT_INITIAL_MATRIX */
1027     ierr =  MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
1028   }
1029   ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);
1030   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1031   pon *= dof;
1032   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1033   pn  *= dof;
1034 
1035   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
1036   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1037   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1038   pcstart *= dof;
1039   pcend   *= dof;
1040   cmaxr = 0;
1041   for (i=0; i<pon; i++) {
1042     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
1043   }
1044   cd = (Mat_SeqAIJ*)(c->A)->data;
1045   co = (Mat_SeqAIJ*)(c->B)->data;
1046   for (i=0; i<pn; i++) {
1047     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
1048   }
1049   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
1050   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
1051   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
1052   ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
1053   for (i=0; i<am && (pon || pn); i++) {
1054     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
1055     offset = i%dof;
1056     ii     = i/dof;
1057     nzi  = po->i[ii+1] - po->i[ii];
1058     dnzi = pd->i[ii+1] - pd->i[ii];
1059     if (!nzi && !dnzi) continue;
1060     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);CHKERRQ(ierr);
1061     voff = 0;
1062     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
1063     if (!voff) continue;
1064 
1065     /* Form remote C(ii, :) */
1066     poj = po->j + po->i[ii];
1067     poa = po->a + po->i[ii];
1068     for (j=0; j<nzi; j++) {
1069       pocol = poj[j]*dof+offset;
1070       c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
1071       c_rmtaa = c_rmta + ptap->c_rmti[pocol];
1072       for (jj=0; jj<voff; jj++) {
1073         apvaluestmp[jj] = apvalues[jj]*poa[j];
1074         /*If the row is empty */
1075         if (!c_rmtc[pocol]) {
1076           c_rmtjj[jj] = apindices[jj];
1077           c_rmtaa[jj] = apvaluestmp[jj];
1078           c_rmtc[pocol]++;
1079         } else {
1080           ierr = PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);CHKERRQ(ierr);
1081           if (loc>=0){ /* hit */
1082             c_rmtaa[loc] += apvaluestmp[jj];
1083             ierr = PetscLogFlops(1.0);CHKERRQ(ierr);
1084           } else { /* new element */
1085             loc = -(loc+1);
1086             /* Move data backward */
1087             for (kk=c_rmtc[pocol]; kk>loc; kk--) {
1088               c_rmtjj[kk] = c_rmtjj[kk-1];
1089               c_rmtaa[kk] = c_rmtaa[kk-1];
1090             }/* End kk */
1091             c_rmtjj[loc] = apindices[jj];
1092             c_rmtaa[loc] = apvaluestmp[jj];
1093             c_rmtc[pocol]++;
1094           }
1095         }
1096       } /* End jj */
1097       ierr = PetscLogFlops(voff);CHKERRQ(ierr);
1098     } /* End j */
1099 
1100     /* Form local C(ii, :) */
1101     pdj = pd->j + pd->i[ii];
1102     pda = pd->a + pd->i[ii];
1103     for (j=0; j<dnzi; j++) {
1104       row = pcstart + pdj[j] * dof + offset;
1105       for (jj=0; jj<voff; jj++) {
1106         apvaluestmp[jj] = apvalues[jj]*pda[j];
1107       }/* End kk */
1108       ierr = PetscLogFlops(voff);CHKERRQ(ierr);
1109       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
1110     }/* End j */
1111   } /* End i */
1112 
1113   ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
1114   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
1115   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
1116   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
1117 
1118   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1119   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1120   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1121   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1122   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
1123 
1124   /* Add contributions from remote */
1125   for (i = 0; i < pn; i++) {
1126     row = i + pcstart;
1127     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
1128   }
1129   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
1130 
1131   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1132   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1133 
1134   ptap->reuse = MAT_REUSE_MATRIX;
1135 
1136   /* supporting struct ptap consumes almost same amount of memory as C=PtAP, release it if C will not be updated by A and P */
1137   if (ptap->freestruct) {
1138     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
1139   }
1140   PetscFunctionReturn(0);
1141 }
1142 
1143 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,Mat C)
1144 {
1145   PetscErrorCode      ierr;
1146 
1147   PetscFunctionBegin;
1148 
1149   ierr = MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,C);CHKERRQ(ierr);
1150   PetscFunctionReturn(0);
1151 }
1152 
1153 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat *C)
1154 {
1155   Mat_APMPI           *ptap;
1156   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1157   MPI_Comm            comm;
1158   Mat                 Cmpi;
1159   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1160   MatType             mtype;
1161   PetscSF             sf;
1162   PetscSFNode         *iremote;
1163   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1164   const PetscInt      *rootdegrees;
1165   PetscHSetI          ht,oht,*hta,*hto;
1166   PetscInt            pn,pon,*c_rmtc,i,j,nzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1167   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1168   PetscInt            nalg=2,alg=0,offset,ii;
1169   PetscMPIInt         owner;
1170   const PetscInt      *mappingindices;
1171   PetscBool           flg;
1172   const char          *algTypes[2] = {"overlapping","merged"};
1173   IS                  map;
1174   PetscErrorCode      ierr;
1175 
1176   PetscFunctionBegin;
1177   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1178 
1179   /* Create symbolic parallel matrix Cmpi */
1180   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1181   pn *= dof;
1182   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1183   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1184   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1185   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1186 
1187   ierr = PetscNew(&ptap);CHKERRQ(ierr);
1188   ptap->reuse = MAT_INITIAL_MATRIX;
1189   ptap->algType = 2;
1190 
1191   /* Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1192   ierr = MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
1193   ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);
1194   /* This equals to the number of offdiag columns in P */
1195   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1196   pon *= dof;
1197   /* offsets */
1198   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1199   /* The number of columns we will send to remote ranks */
1200   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1201   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1202   for (i=0; i<pon; i++) {
1203     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1204   }
1205   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1206   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1207   /* Create hash table to merge all columns for C(i, :) */
1208   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1209 
1210   ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
1211   ptap->c_rmti[0] = 0;
1212   /* 2) Pass 1: calculate the size for C_rmt (a matrix need to be sent to other processors)  */
1213   for (i=0; i<am && pon; i++) {
1214     /* Form one row of AP */
1215     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1216     offset = i%dof;
1217     ii     = i/dof;
1218     /* If the off diag is empty, we should not do any calculation */
1219     nzi = po->i[ii+1] - po->i[ii];
1220     if (!nzi) continue;
1221 
1222     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,ht);CHKERRQ(ierr);
1223     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1224     /* If AP is empty, just continue */
1225     if (!htsize) continue;
1226     /* Form C(ii, :) */
1227     poj = po->j + po->i[ii];
1228     for (j=0; j<nzi; j++) {
1229       ierr = PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);CHKERRQ(ierr);
1230     }
1231   }
1232 
1233   for (i=0; i<pon; i++) {
1234     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1235     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1236     c_rmtc[i] = htsize;
1237   }
1238 
1239   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1240 
1241   for (i=0; i<pon; i++) {
1242     off = 0;
1243     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1244     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1245   }
1246   ierr = PetscFree(hta);CHKERRQ(ierr);
1247 
1248   ierr = PetscMalloc1(pon,&iremote);CHKERRQ(ierr);
1249   for (i=0; i<pon; i++) {
1250     owner = 0; lidx = 0;
1251     offset = i%dof;
1252     ii     = i/dof;
1253     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);CHKERRQ(ierr);
1254     iremote[i].index = lidx*dof + offset;
1255     iremote[i].rank  = owner;
1256   }
1257 
1258   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1259   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1260   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1261   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1262   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1263   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1264   /* How many neighbors have contributions to my rows? */
1265   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1266   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1267   rootspacesize = 0;
1268   for (i = 0; i < pn; i++) {
1269     rootspacesize += rootdegrees[i];
1270   }
1271   ierr = PetscMalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1272   ierr = PetscMalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1273   /* Get information from leaves:
1274    * the number of columns other processes contribute to my rows
1275    */
1276   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1277   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1278   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1279   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1280   /* The number of columns received for each row */
1281   ptap->c_othi[0] = 0;
1282   rootspacesize = 0;
1283   rootspaceoffsets[0] = 0;
1284   for (i = 0; i < pn; i++) {
1285     rcvncols = 0;
1286     for (j = 0; j<rootdegrees[i]; j++) {
1287       rcvncols += rootspace[rootspacesize];
1288       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1289       rootspacesize++;
1290     }
1291     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1292   }
1293   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1294 
1295   ierr = PetscMalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1296   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1297   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1298   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1299   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1300 
1301   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1302   nleaves = 0;
1303   for (i = 0; i<pon; i++) {
1304     owner = 0;
1305     ii = i/dof;
1306     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);CHKERRQ(ierr);
1307     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1308     for (j=0; j<sendncols; j++) {
1309       iremote[nleaves].rank = owner;
1310       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1311     }
1312   }
1313   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1314   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1315 
1316   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1317   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1318   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1319   /* One-to-one map */
1320   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1321 
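       /* The reduction is split into Begin/End so that the local symbolic pass below overlaps
          with the communication of the remote column indices. */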
1322   ierr = PetscMalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1323   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1324   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1325   pcstart *= dof;
1326   pcend   *= dof;
1327   ierr = PetscMalloc2(pn,&hta,pn,&hto);CHKERRQ(ierr);
1328   for (i=0; i<pn; i++) {
1329     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1330     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1331   }
1332   /* Work on local part */
1333   /* 4) Pass 1: Estimate memory for C_loc */
1334   for (i=0; i<am && pn; i++) {
1335     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1336     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1337     offset = i%dof;
1338     ii     = i/dof;
1339     nzi = pd->i[ii+1] - pd->i[ii];
1340     if (!nzi) continue;
1341 
1342     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);CHKERRQ(ierr);
1343     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1344     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1345     if (!(htsize+htosize)) continue;
1346     /* Form C(ii, :) */
1347     pdj = pd->j + pd->i[ii];
1348     for (j=0; j<nzi; j++) {
1349       ierr = PetscHSetIUpdate(hta[pdj[j]*dof+offset],ht);CHKERRQ(ierr);
1350       ierr = PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);CHKERRQ(ierr);
1351     }
1352   }
1353 
1354   ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
1355 
1356   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1357   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1358 
1359   /* Get remote data */
1360   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1361   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1362 
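       /* Fold the received column indices into the per-row sets: a column inside [pcstart,pcend)
          belongs to the diagonal block, anything else to the off-diagonal block.  The resulting
          set sizes become dnz[]/onz[] for the exact preallocation below. */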
1363   for (i = 0; i < pn; i++) {
1364     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1365     rdj = c_othj + ptap->c_othi[i];
1366     for (j = 0; j < nzi; j++) {
1367       col =  rdj[j];
1368       /* diag part */
1369       if (col>=pcstart && col<pcend) {
1370         ierr = PetscHSetIAdd(hta[i],col);CHKERRQ(ierr);
1371       } else { /* off diag */
1372         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1373       }
1374     }
1375     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1376     dnz[i] = htsize;
1377     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1378     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1379     onz[i] = htsize;
1380     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1381   }
1382 
1383   ierr = PetscFree2(hta,hto);CHKERRQ(ierr);
1384   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1385 
1386   /* local sizes and preallocation */
1387   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1388   ierr = MatSetBlockSizes(Cmpi,dof>1? dof: P->cmap->bs,dof>1? dof: P->cmap->bs);CHKERRQ(ierr);
1389   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1390   ierr = MatSetUp(Cmpi);CHKERRQ(ierr);
1391   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1392 
1393   /* attach the supporting struct to Cmpi for reuse */
1394   c = (Mat_MPIAIJ*)Cmpi->data;
1395   c->ap           = ptap;
1396   ptap->duplicate = Cmpi->ops->duplicate;
1397   ptap->destroy   = Cmpi->ops->destroy;
1398   ptap->view      = Cmpi->ops->view;
1399 
1400   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1401   Cmpi->assembled = PETSC_FALSE;
1402   /* pick an algorithm */
1403   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1404   alg = 0;
1405   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1406   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1407   switch (alg) {
1408     case 0:
1409       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1410       break;
1411     case 1:
1412       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1413       break;
1414     default:
1415       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1416   }
1417   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1418   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1419   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1420   *C                     = Cmpi;
1421   PetscFunctionReturn(0);
1422 }
1423 
1424 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,PetscReal fill,Mat *C)
1425 {
1426   PetscErrorCode      ierr;
1427 
1428   PetscFunctionBegin;
1429 
1430   ierr = MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(A,P,1,fill,C);CHKERRQ(ierr);
1431   PetscFunctionReturn(0);
1432 }
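
       /*
          Usage sketch (illustrative only, not a definition of this file's public interface):
          the all-at-once symbolic/numeric pair above is normally reached through MatPtAP()
          with runtime options.  Below is a minimal caller-side example; the command-line
          value names are assumptions based on the algorithm names registered with
          PetscOptionsEList() in this file.

            Mat            A,P,C;
            PetscErrorCode ierr;
            ...   assemble the MPIAIJ matrices A and P   ...
            ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);
            ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);   (reuses the symbolic data)

          run, for example, with
            -matptap_via allatonce -matptap_allatonce_via merged
       */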
1433 
1434 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat *C)
1435 {
1436   Mat_APMPI           *ptap;
1437   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1438   MPI_Comm            comm;
1439   Mat                 Cmpi;
1440   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1441   MatType             mtype;
1442   PetscSF             sf;
1443   PetscSFNode         *iremote;
1444   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1445   const PetscInt      *rootdegrees;
1446   PetscHSetI          ht,oht,*hta,*hto,*htd;
1447   PetscInt            pn,pon,*c_rmtc,i,j,nzi,dnzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1448   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1449   PetscInt            nalg=2,alg=0,offset,ii;
1450   PetscMPIInt         owner;
1451   PetscBool           flg;
1452   const char          *algTypes[2] = {"merged","overlapping"};
1453   const PetscInt      *mappingindices;
1454   IS                  map;
1455   PetscErrorCode      ierr;
1456 
1457   PetscFunctionBegin;
1458   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1459 
1460   /* Create symbolic parallel matrix Cmpi */
1461   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1462   pn *= dof;
1463   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1464   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1465   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1466   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1467 
1468   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1469   ptap->reuse = MAT_INITIAL_MATRIX;
1470   ptap->algType = 3;
1471 
1472   /* 0) Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1473   ierr = MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
1474   ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);
1475 
1476   /* This equals the number of off-diagonal columns in P */
1477   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1478   pon *= dof;
1479   /* offsets */
1480   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1481   /* The number of columns we will send to remote ranks */
1482   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1483   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1484   for (i=0; i<pon; i++) {
1485     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1486   }
1487   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1488   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1489   /* Create hash table to merge all columns for C(i, :) */
1490   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1491   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1492   ierr = PetscMalloc2(pn,&htd,pn,&hto);CHKERRQ(ierr);
1493   for (i=0; i<pn; i++) {
1494     ierr = PetscHSetICreate(&htd[i]);CHKERRQ(ierr);
1495     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1496   }
1497 
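       /* Unlike the plain all-at-once variant, the diagonal (htd) and off-diagonal (hto) hash
          sets for the local rows of C are created up front, so remote and local symbolic
          counting can share the single row sweep below (hence the name "merged"). */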
1498   ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
1499   ptap->c_rmti[0] = 0;
1500   /* 2) Pass 1: calculate the size of C_rmt (the matrix that needs to be sent to other processors) */
1501   for (i=0; i<am && (pon || pn); i++) {
1502     /* Form one row of AP */
1503     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1504     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1505     offset = i%dof;
1506     ii     = i/dof;
1507     /* If both the diagonal and off-diagonal parts of this row of P are empty, skip it */
1508     nzi = po->i[ii+1] - po->i[ii];
1509     dnzi = pd->i[ii+1] - pd->i[ii];
1510     if (!nzi && !dnzi) continue;
1511 
1512     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);CHKERRQ(ierr);
1513     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1514     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1515     /* If AP is empty, just continue */
1516     if (!(htsize+htosize)) continue;
1517 
1518     /* Form remote C(ii, :) */
1519     poj = po->j + po->i[ii];
1520     for (j=0; j<nzi; j++) {
1521       ierr = PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);CHKERRQ(ierr);
1522       ierr = PetscHSetIUpdate(hta[poj[j]*dof+offset],oht);CHKERRQ(ierr);
1523     }
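         /* Both ht and oht are merged into hta[]: the receiving rank re-classifies every column
            against its own diagonal range, so no diagonal/off-diagonal split is needed on the
            sending side. */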
1524 
1525     /* Form local C(ii, :) */
1526     pdj = pd->j + pd->i[ii];
1527     for (j=0; j<dnzi; j++) {
1528       ierr = PetscHSetIUpdate(htd[pdj[j]*dof+offset],ht);CHKERRQ(ierr);
1529       ierr = PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);CHKERRQ(ierr);
1530     }
1531   }
1532 
1533   ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
1534 
1535   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1536   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1537 
1538   for (i=0; i<pon; i++) {
1539     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1540     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1541     c_rmtc[i] = htsize;
1542   }
1543 
1544   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1545 
1546   for (i=0; i<pon; i++) {
1547     off = 0;
1548     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1549     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1550   }
1551   ierr = PetscFree(hta);CHKERRQ(ierr);
1552 
1553   ierr = PetscMalloc1(pon,&iremote);CHKERRQ(ierr);
1554   for (i=0; i<pon; i++) {
1555     owner = 0; lidx = 0;
1556     offset = i%dof;
1557     ii     = i/dof;
1558     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);CHKERRQ(ierr);
1559     iremote[i].index = lidx*dof+offset;
1560     iremote[i].rank  = owner;
1561   }
1562 
1563   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1564   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1565   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1566   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1567   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1568   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1569   /* How many neighbors have contributions to my rows? */
1570   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1571   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1572   rootspacesize = 0;
1573   for (i = 0; i < pn; i++) {
1574     rootspacesize += rootdegrees[i];
1575   }
1576   ierr = PetscMalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1577   ierr = PetscMalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1578   /* Get information from leaves:
1579    * the number of columns other processes contribute to my rows
1580    */
1581   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1582   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1583   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1584   ierr = PetscMalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1585   /* The number of columns received for each row */
1586   ptap->c_othi[0]     = 0;
1587   rootspacesize       = 0;
1588   rootspaceoffsets[0] = 0;
1589   for (i = 0; i < pn; i++) {
1590     rcvncols = 0;
1591     for (j = 0; j<rootdegrees[i]; j++) {
1592       rcvncols += rootspace[rootspacesize];
1593       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1594       rootspacesize++;
1595     }
1596     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1597   }
1598   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1599 
1600   ierr = PetscMalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1601   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1602   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1603   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1604   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1605 
1606   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1607   nleaves = 0;
1608   for (i = 0; i<pon; i++) {
1609     owner = 0;
1610     ii    = i/dof;
1611     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);CHKERRQ(ierr);
1612     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1613     for (j=0; j<sendncols; j++) {
1614       iremote[nleaves].rank    = owner;
1615       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1616     }
1617   }
1618   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1619   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1620 
1621   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1622   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1623   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
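       /* Unlike the non-merged variant, Begin and End are issued back to back here: the merged
          sweep above has already filled the local hash sets, so there is no local work left to
          overlap with this communication. */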
1624   /* One-to-one map */
1625   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1626   /* Get remote data */
1627   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1628   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1629   ierr = PetscMalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1630   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1631   pcstart *= dof;
1632   pcend   *= dof;
1633   for (i = 0; i < pn; i++) {
1634     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1635     rdj = c_othj + ptap->c_othi[i];
1636     for (j = 0; j < nzi; j++) {
1637       col =  rdj[j];
1638       /* diag part */
1639       if (col>=pcstart && col<pcend) {
1640         ierr = PetscHSetIAdd(htd[i],col);CHKERRQ(ierr);
1641       } else { /* off diag */
1642         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1643       }
1644     }
1645     ierr = PetscHSetIGetSize(htd[i],&htsize);CHKERRQ(ierr);
1646     dnz[i] = htsize;
1647     ierr = PetscHSetIDestroy(&htd[i]);CHKERRQ(ierr);
1648     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1649     onz[i] = htsize;
1650     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1651   }
1652 
1653   ierr = PetscFree2(htd,hto);CHKERRQ(ierr);
1654   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1655 
1656   /* local sizes and preallocation */
1657   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1658   ierr = MatSetBlockSizes(Cmpi, dof>1? dof: P->cmap->bs,dof>1? dof: P->cmap->bs);CHKERRQ(ierr);
1659   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1660   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1661 
1662   /* attach the supporting struct to Cmpi for reuse */
1663   c = (Mat_MPIAIJ*)Cmpi->data;
1664   c->ap           = ptap;
1665   ptap->duplicate = Cmpi->ops->duplicate;
1666   ptap->destroy   = Cmpi->ops->destroy;
1667   ptap->view      = Cmpi->ops->view;
1668 
1669   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1670   Cmpi->assembled        = PETSC_FALSE;
1671   /* pick an algorithm */
1672   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1673   alg = 0;
1674   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1675   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1676   switch (alg) {
1677     case 0:
1678       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1679       break;
1680     case 1:
1681       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1682       break;
1683     default:
1684       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1685   }
1686   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1687   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1688   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1689   *C                     = Cmpi;
1690   PetscFunctionReturn(0);
1691 }
1692 
1693 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,PetscReal fill,Mat *C)
1694 {
1695   PetscErrorCode      ierr;
1696 
1697   PetscFunctionBegin;
1698 
1699   ierr = MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,fill,C);CHKERRQ(ierr);
1700   PetscFunctionReturn(0);
1701 }
1702 
1703 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat *C)
1704 {
1705   PetscErrorCode      ierr;
1706   Mat_APMPI           *ptap;
1707   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
1708   MPI_Comm            comm;
1709   PetscMPIInt         size,rank;
1710   Mat                 Cmpi;
1711   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
1712   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
1713   PetscInt            *lnk,i,k,pnz,row,nsend;
1714   PetscBT             lnkbt;
1715   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
1716   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
1717   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
1718   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1719   MPI_Request         *swaits,*rwaits;
1720   MPI_Status          *sstatus,rstatus;
1721   PetscLayout         rowmap;
1722   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
1723   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
1724   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
1725   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
1726   PetscScalar         *apv;
1727   PetscTable          ta;
1728   MatType             mtype;
1729   const char          *prefix;
1730 #if defined(PETSC_USE_INFO)
1731   PetscReal           apfill;
1732 #endif
1733 
1734   PetscFunctionBegin;
1735   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1736   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1737   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1738 
1739   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
1740 
1741   /* create symbolic parallel matrix Cmpi */
1742   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1743   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1744   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1745 
1746   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
1747   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;
1748 
1749   /* create struct Mat_APMPI and attach it to C later */
1750   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1751   ptap->reuse = MAT_INITIAL_MATRIX;
1752   ptap->algType = 1;
1753 
1754   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1755   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1756   /* get P_loc by taking all local rows of P */
1757   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1758 
1759   /* (0) compute Rd = Pd^T, Ro = Po^T  */
1760   /* --------------------------------- */
1761   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1762   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1763 
1764   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
1765   /* ----------------------------------------------------------------- */
1766   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1767   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1768 
1769   /* create and initialize a linked list */
1770   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
1771   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
1772   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
1773   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
1774   /* printf("[%d] est %d, Crmax %d; pN %d\n",rank,5*(p_loc->rmax+p_oth->rmax + (PetscInt)(1.e-2*pN)),Crmax,pN); */
1775 
1776   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
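       /* lnk is a condensed sorted linked list over the pN global columns; each row of AP is
          formed by merging the sorted column lists of the rows of P_loc/P_oth selected by the
          nonzeros of the corresponding row of A. */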
1777 
1778   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
1779   if (ao) {
1780     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
1781   } else {
1782     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
1783   }
1784   current_space = free_space;
1785   nspacedouble  = 0;
1786 
1787   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
1788   api[0] = 0;
1789   for (i=0; i<am; i++) {
1790     /* diagonal portion: Ad[i,:]*P */
1791     ai = ad->i; pi = p_loc->i;
1792     nzi = ai[i+1] - ai[i];
1793     aj  = ad->j + ai[i];
1794     for (j=0; j<nzi; j++) {
1795       row  = aj[j];
1796       pnz  = pi[row+1] - pi[row];
1797       Jptr = p_loc->j + pi[row];
1798       /* add non-zero cols of P into the sorted linked list lnk */
1799       ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1800     }
1801     /* off-diagonal portion: Ao[i,:]*P */
1802     if (ao) {
1803       ai = ao->i; pi = p_oth->i;
1804       nzi = ai[i+1] - ai[i];
1805       aj  = ao->j + ai[i];
1806       for (j=0; j<nzi; j++) {
1807         row  = aj[j];
1808         pnz  = pi[row+1] - pi[row];
1809         Jptr = p_oth->j + pi[row];
1810         ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1811       }
1812     }
1813     apnz     = lnk[0];
1814     api[i+1] = api[i] + apnz;
1815     if (ap_rmax < apnz) ap_rmax = apnz;
1816 
1817     /* if free space is not available, double the total space in the list */
1818     if (current_space->local_remaining<apnz) {
1819       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
1820       nspacedouble++;
1821     }
1822 
1823     /* Copy data into free space, then initialize lnk */
1824     ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1825 
1826     current_space->array           += apnz;
1827     current_space->local_used      += apnz;
1828     current_space->local_remaining -= apnz;
1829   }
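       /* The free-space chunks and api[] now describe the complete nonzero pattern of
          AP_loc = A_loc*P; nspacedouble records how often the initial fill-based estimate
          had to be enlarged. */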
1830   /* Allocate space for apj and apv, initialize apj, and */
1831   /* destroy list of free space and other temporary array(s) */
1832   ierr   = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
1833   ierr   = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
1834   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1835 
1836   /* Create AP_loc for reuse */
1837   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
1838 
1839 #if defined(PETSC_USE_INFO)
1840   if (ao) {
1841     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
1842   } else {
1843     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
1844   }
1845   ptap->AP_loc->info.mallocs           = nspacedouble;
1846   ptap->AP_loc->info.fill_ratio_given  = fill;
1847   ptap->AP_loc->info.fill_ratio_needed = apfill;
1848 
1849   if (api[am]) {
1850     ierr = PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
1851     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
1852   } else {
1853     ierr = PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty\n");CHKERRQ(ierr);
1854   }
1855 #endif
1856 
1857   /* (2-1) compute symbolic Co = Ro*AP_loc  */
1858   /* ------------------------------------ */
1859   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1860   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
1861   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
1862   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
1863 
1864   /* (3) send coj of C_oth to other processors  */
1865   /* ------------------------------------------ */
1866   /* determine row ownership */
1867   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
1868   rowmap->n  = pn;
1869   rowmap->bs = 1;
1870   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
1871   owners = rowmap->range;
1872 
1873   /* determine the number of messages to send, their lengths */
1874   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
1875   ierr = PetscArrayzero(len_s,size);CHKERRQ(ierr);
1876   ierr = PetscArrayzero(len_si,size);CHKERRQ(ierr);
1877 
1878   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1879   coi   = c_oth->i; coj = c_oth->j;
1880   con   = ptap->C_oth->rmap->n;
1881   proc  = 0;
1882   for (i=0; i<con; i++) {
1883     while (prmap[i] >= owners[proc+1]) proc++;
1884     len_si[proc]++;               /* num of rows in Co (the off-process part of P^T*A*P) to be sent to [proc] */
1885     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1886   }
1887 
1888   len          = 0; /* max length of buf_si[], see (4) */
1889   owners_co[0] = 0;
1890   nsend        = 0;
1891   for (proc=0; proc<size; proc++) {
1892     owners_co[proc+1] = owners_co[proc] + len_si[proc];
1893     if (len_s[proc]) {
1894       nsend++;
1895       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1896       len         += len_si[proc];
1897     }
1898   }
1899 
1900   /* determine the number and length of messages to receive for coi and coj  */
1901   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
1902   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
1903 
1904   /* post the Irecv and Isend of coj */
1905   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
1906   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
1907   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
1908   for (proc=0, k=0; proc<size; proc++) {
1909     if (!len_s[proc]) continue;
1910     i    = owners_co[proc];
1911     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
1912     k++;
1913   }
1914 
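       /* While the coj messages are in flight, compute the local symbolic product below;
          the receives are completed only afterwards (communication/computation overlap). */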
1915   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
1916   /* ---------------------------------------- */
1917   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
1918   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
1919   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
1920   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
1921 
1922   /* receives coj are complete */
1923   for (i=0; i<nrecv; i++) {
1924     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1925   }
1926   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1927   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1928 
1929   /* add received column indices into ta to update Crmax */
1930   for (k=0; k<nrecv; k++) {/* k-th received message */
1931     Jptr = buf_rj[k];
1932     for (j=0; j<len_r[k]; j++) {
1933       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
1934     }
1935   }
1936   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
1937   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
1938 
1939   /* (4) send and recv coi */
1940   /*-----------------------*/
1941   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
1942   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
1943   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
1944   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
1945   for (proc=0,k=0; proc<size; proc++) {
1946     if (!len_s[proc]) continue;
1947     /* form outgoing message for i-structure:
1948          buf_si[0]:                 nrows to be sent
1949                [1:nrows]:           row index (global)
1950                [nrows+1:2*nrows+1]: i-structure index
1951     */
1952     /*-------------------------------------------*/
1953     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1954     buf_si_i    = buf_si + nrows+1;
1955     buf_si[0]   = nrows;
1956     buf_si_i[0] = 0;
1957     nrows       = 0;
1958     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1959       nzi = coi[i+1] - coi[i];
1960       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
1961       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
1962       nrows++;
1963     }
1964     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
1965     k++;
1966     buf_si += len_si[proc];
1967   }
1968   for (i=0; i<nrecv; i++) {
1969     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1970   }
1971   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1972   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1973 
1974   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
1975   ierr = PetscFree(len_ri);CHKERRQ(ierr);
1976   ierr = PetscFree(swaits);CHKERRQ(ierr);
1977   ierr = PetscFree(buf_s);CHKERRQ(ierr);
1978 
1979   /* (5) compute the local portion of Cmpi      */
1980   /* ------------------------------------------ */
1981   /* set initial free space to be Crmax, sufficient for holding nonzeros in each row of Cmpi */
1982   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
1983   current_space = free_space;
1984 
1985   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
1986   for (k=0; k<nrecv; k++) {
1987     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1988     nrows       = *buf_ri_k[k];
1989     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
1990     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
1991   }
1992 
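       /* Merge, row by row, the local rows of C_loc with the rows received from other ranks
          (buf_ri/buf_rj) to obtain the exact diagonal/off-diagonal counts dnz[]/onz[] of Cmpi. */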
1993   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
1994   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
1995   for (i=0; i<pn; i++) {
1996     /* add C_loc into Cmpi */
1997     nzi  = c_loc->i[i+1] - c_loc->i[i];
1998     Jptr = c_loc->j + c_loc->i[i];
1999     ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
2000 
2001     /* add received col data into lnk */
2002     for (k=0; k<nrecv; k++) { /* k-th received message */
2003       if (i == *nextrow[k]) { /* i-th row */
2004         nzi  = *(nextci[k]+1) - *nextci[k];
2005         Jptr = buf_rj[k] + *nextci[k];
2006         ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
2007         nextrow[k]++; nextci[k]++;
2008       }
2009     }
2010     nzi = lnk[0];
2011 
2012     /* copy data into free space, then initialize lnk */
2013     ierr = PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
2014     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
2015   }
2016   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
2017   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
2018   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
2019 
2020   /* local sizes and preallocation */
2021   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2022   if (P->cmap->bs > 0) {
2023     ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
2024     ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
2025   }
2026   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
2027   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
2028 
2029   /* members in merge */
2030   ierr = PetscFree(id_r);CHKERRQ(ierr);
2031   ierr = PetscFree(len_r);CHKERRQ(ierr);
2032   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
2033   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
2034   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
2035   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
2036   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
2037 
2038   /* attach the supporting struct to Cmpi for reuse */
2039   c = (Mat_MPIAIJ*)Cmpi->data;
2040   c->ap           = ptap;
2041   ptap->duplicate = Cmpi->ops->duplicate;
2042   ptap->destroy   = Cmpi->ops->destroy;
2043   ptap->view      = Cmpi->ops->view;
2044   ierr = PetscCalloc1(pN,&ptap->apa);CHKERRQ(ierr);
2045 
2046   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
2047   Cmpi->assembled        = PETSC_FALSE;
2048   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
2049   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
2050   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
2051   *C                     = Cmpi;
2052   PetscFunctionReturn(0);
2053 }
2054 
2055 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
2056 {
2057   PetscErrorCode    ierr;
2058   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
2059   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
2060   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
2061   Mat_APMPI         *ptap = c->ap;
2062   Mat               AP_loc,C_loc,C_oth;
2063   PetscInt          i,rstart,rend,cm,ncols,row;
2064   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
2065   PetscScalar       *apa;
2066   const PetscInt    *cols;
2067   const PetscScalar *vals;
2068 
2069   PetscFunctionBegin;
2070   if (!ptap->AP_loc) {
2071     MPI_Comm comm;
2072     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
2073     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
2074   }
2075 
2076   ierr = MatZeroEntries(C);CHKERRQ(ierr);
2077   /* 1) get R = Pd^T,Ro = Po^T */
2078   if (ptap->reuse == MAT_REUSE_MATRIX) {
2079     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
2080     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
2081   }
2082 
2083   /* 2) get AP_loc */
2084   AP_loc = ptap->AP_loc;
2085   ap = (Mat_SeqAIJ*)AP_loc->data;
2086 
2087   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
2088   /*-----------------------------------------------------*/
2089   if (ptap->reuse == MAT_REUSE_MATRIX) {
2090     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
2091     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
2092     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
2093   }
2094 
2095   /* 2-2) compute numeric A_loc*P - dominating part */
2096   /* ---------------------------------------------- */
2097   /* get data from symbolic products */
2098   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
2099   if (ptap->P_oth) {
2100     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
2101   }
2102   apa   = ptap->apa;
2103   api   = ap->i;
2104   apj   = ap->j;
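       /* apa is a dense scratch row of length pN allocated in the symbolic phase; after each row
          is scattered into ap->a only the entries that were touched are zeroed, so the per-row
          cost stays proportional to that row's number of nonzeros. */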
2105   for (i=0; i<am; i++) {
2106     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
2107     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
2108     apnz = api[i+1] - api[i];
2109     for (j=0; j<apnz; j++) {
2110       col = apj[j+api[i]];
2111       ap->a[j+ap->i[i]] = apa[col];
2112       apa[col] = 0.0;
2113     }
2114   }
2115 
2116   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
2117   ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
2118   ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
2119   C_loc = ptap->C_loc;
2120   C_oth = ptap->C_oth;
2121 
2122   /* add C_loc and C_oth to C */
2123   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
2124 
2125   /* C_loc -> C */
2126   cm    = C_loc->rmap->N;
2127   c_seq = (Mat_SeqAIJ*)C_loc->data;
2128   cols = c_seq->j;
2129   vals = c_seq->a;
2130 
2131 
2132   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
2133   /* when there are no off-processor parts.  */
2134   /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
2135   /* is no longer true. Then the more complex function MatSetValues_MPIAIJ() has to be used, where the column index is looked up from */
2136   /* a table and additional bookkeeping is required. */
2137   if (C->assembled) {
2138     C->was_assembled = PETSC_TRUE;
2139     C->assembled     = PETSC_FALSE;
2140   }
2141   if (C->was_assembled) {
2142     for (i=0; i<cm; i++) {
2143       ncols = c_seq->i[i+1] - c_seq->i[i];
2144       row = rstart + i;
2145       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2146       cols += ncols; vals += ncols;
2147     }
2148   } else {
2149     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
2150   }
2151 
2152   /* Co -> C, off-processor part */
2153   cm = C_oth->rmap->N;
2154   c_seq = (Mat_SeqAIJ*)C_oth->data;
2155   cols = c_seq->j;
2156   vals = c_seq->a;
2157   for (i=0; i<cm; i++) {
2158     ncols = c_seq->i[i+1] - c_seq->i[i];
2159     row = p->garray[i];
2160     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2161     cols += ncols; vals += ncols;
2162   }
2163 
2164   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2165   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2166 
2167   ptap->reuse = MAT_REUSE_MATRIX;
2168 
2169   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be recomputed from A and P */
2170   if (ptap->freestruct) {
2171     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
2172   }
2173   PetscFunctionReturn(0);
2174 }
2175