xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 55e7fe800d976e85ed2b5cd8bfdef564daa37bd9)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
11 static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
12 {
13   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
14   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
15   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
16   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
17   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
18   PetscLayout        rmap;
19   MPI_Comm           comm;
20   PetscSF            sf;
21   PetscSFNode       *iremote;
22   PetscBool          done;
23   PetscErrorCode     ierr;
24 
25   PetscFunctionBegin;
26   ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
27   ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
28   ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
29   ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
30   ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
31   /* construct sf graph*/
32   nleaves = nlrows_is;
33   for (i=0; i<nlrows_is; i++){
34     owner = -1;
35     rlocalindex = -1;
36     ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
37     iremote[i].rank  = owner;
38     iremote[i].index = rlocalindex;
39   }
40   ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
41   ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
42   nroots = nlrows_mat;
43   for (i=0; i<nlrows_mat; i++){
44     ncols_send[i] = xadj[i+1]-xadj[i];
45   }
46   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
47   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
48   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
49   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
50   ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
51   ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
52   ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
53   ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
54   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
55   Ncols_recv =0;
56   for (i=0; i<nlrows_is; i++){
57     Ncols_recv             += ncols_recv[i];
58     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
59   }
60   Ncols_send = 0;
61   for(i=0; i<nlrows_mat; i++){
62     Ncols_send += ncols_send[i];
63   }
64   ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
65   ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
66   nleaves = Ncols_recv;
67   Ncols_recv = 0;
68   for (i=0; i<nlrows_is; i++){
69     ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
70     for (j=0; j<ncols_recv[i]; j++){
71       iremote[Ncols_recv].rank    = owner;
72       iremote[Ncols_recv++].index = xadj_recv[i]+j;
73     }
74   }
75   ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
76   /*if we need to deal with edge weights ???*/
77   if (a->values){isvalue=1;} else {isvalue=0;}
78   /*involve a global communication */
79   /*ierr = MPIU_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);*/
80   if (isvalue){ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
81   nroots = Ncols_send;
82   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
83   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
84   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
85   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
86   ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
87   ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
88   if (isvalue){
89     ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
90     ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
91   }
92   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
93   ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
94   ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
95   ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
96   rnclos = 0;
97   for (i=0; i<nlrows_is; i++){
98     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
99       ierr = PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);CHKERRQ(ierr);
100       if (loc<0){
101         adjncy_recv[j] = -1;
102         if (isvalue) values_recv[j] = -1;
103         ncols_recv[i]--;
104       } else {
105         rnclos++;
106       }
107     }
108   }
109   ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
110   ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
111   if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
112   ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
113   rnclos = 0;
114   for(i=0; i<nlrows_is; i++){
115     for(j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
116       if (adjncy_recv[j]<0) continue;
117       sadjncy[rnclos] = adjncy_recv[j];
118       if (isvalue) svalues[rnclos] = values_recv[j];
119       rnclos++;
120     }
121   }
122   for(i=0; i<nlrows_is; i++){
123     sxadj[i+1] = sxadj[i]+ncols_recv[i];
124   }
125   if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
126   if (sadj_adjncy){ *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
127   if (sadj_values){
128     if (isvalue) *sadj_values = svalues; else *sadj_values=0;
129   } else {
130     if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
131   }
132   ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
133   ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
134   if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
135   PetscFunctionReturn(0);
136 }
137 
138 static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
139 {
140   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
141   PetscInt          *indices,nindx,j,k,loc;
142   PetscMPIInt        issame;
143   const PetscInt    *irow_indices,*icol_indices;
144   MPI_Comm           scomm_row,scomm_col,scomm_mat;
145   PetscErrorCode     ierr;
146 
147   PetscFunctionBegin;
148   nindx = 0;
149   /*
150    * Estimate a maximum number for allocating memory
151    */
152   for(i=0; i<n; i++){
153     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
154     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
155     nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
156   }
157   ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
158   /* construct a submat */
159   for(i=0; i<n; i++){
160     if (subcomm){
161       ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
162       ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
163       ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
164       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
165       ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
166       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
167     } else {
168       scomm_row = PETSC_COMM_SELF;
169     }
170     /*get sub-matrix data*/
171     sxadj=0; sadjncy=0; svalues=0;
172     ierr = MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
173     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
174     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
175     ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
176     ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
177     ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
178     ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
179     ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
180     ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
181     nindx = irow_n+icol_n;
182     ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
183     /* renumber columns */
184     for(j=0; j<irow_n; j++){
185       for(k=sxadj[j]; k<sxadj[j+1]; k++){
186         ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
187 #if defined(PETSC_USE_DEBUG)
188         if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %D",sadjncy[k]);
189 #endif
190         sadjncy[k] = loc;
191       }
192     }
193     if (scall==MAT_INITIAL_MATRIX){
194       ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
195     } else {
196        Mat                sadj = *(submat[i]);
197        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
198        ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
199        ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
200        if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set\n");
201        ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
202        ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
203        if (svalues){ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
204        ierr = PetscFree(sxadj);CHKERRQ(ierr);
205        ierr = PetscFree(sadjncy);CHKERRQ(ierr);
206        if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
207     }
208   }
209   ierr = PetscFree(indices);CHKERRQ(ierr);
210   PetscFunctionReturn(0);
211 }
212 
213 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
214 {
215   PetscErrorCode     ierr;
216   /*get sub-matrices across a sub communicator */
217   PetscFunctionBegin;
218   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
219   PetscFunctionReturn(0);
220 }
221 
222 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
223 {
224   PetscErrorCode     ierr;
225 
226   PetscFunctionBegin;
227   /*get sub-matrices based on PETSC_COMM_SELF */
228   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
232 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
233 {
234   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
235   PetscErrorCode    ierr;
236   PetscInt          i,j,m = A->rmap->n;
237   const char        *name;
238   PetscViewerFormat format;
239 
240   PetscFunctionBegin;
241   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
242   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
243   if (format == PETSC_VIEWER_ASCII_INFO) {
244     PetscFunctionReturn(0);
245   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
246   else {
247     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
248     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
249     for (i=0; i<m; i++) {
250       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
251       for (j=a->i[i]; j<a->i[i+1]; j++) {
252         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
253       }
254       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
255     }
256     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
257     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
258     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
259   }
260   PetscFunctionReturn(0);
261 }
262 
263 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
264 {
265   PetscErrorCode ierr;
266   PetscBool      iascii;
267 
268   PetscFunctionBegin;
269   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
270   if (iascii) {
271     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
272   }
273   PetscFunctionReturn(0);
274 }
275 
276 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
277 {
278   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
279   PetscErrorCode ierr;
280 
281   PetscFunctionBegin;
282 #if defined(PETSC_USE_LOG)
283   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
284 #endif
285   ierr = PetscFree(a->diag);CHKERRQ(ierr);
286   if (a->freeaij) {
287     if (a->freeaijwithfree) {
288       if (a->i) free(a->i);
289       if (a->j) free(a->j);
290     } else {
291       ierr = PetscFree(a->i);CHKERRQ(ierr);
292       ierr = PetscFree(a->j);CHKERRQ(ierr);
293       ierr = PetscFree(a->values);CHKERRQ(ierr);
294     }
295   }
296   ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
297   ierr = PetscFree(mat->data);CHKERRQ(ierr);
298   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
299   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
300   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
301   PetscFunctionReturn(0);
302 }
303 
304 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
305 {
306   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
307   PetscErrorCode ierr;
308 
309   PetscFunctionBegin;
310   switch (op) {
311   case MAT_SYMMETRIC:
312   case MAT_STRUCTURALLY_SYMMETRIC:
313   case MAT_HERMITIAN:
314     a->symmetric = flg;
315     break;
316   case MAT_SYMMETRY_ETERNAL:
317     break;
318   default:
319     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
320     break;
321   }
322   PetscFunctionReturn(0);
323 }
324 
325 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
326 {
327   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
328   PetscErrorCode ierr;
329 
330   PetscFunctionBegin;
331   row -= A->rmap->rstart;
332   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
333   *nz = a->i[row+1] - a->i[row];
334   if (v) {
335     PetscInt j;
336     if (a->rowvalues_alloc < *nz) {
337       ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
338       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
339       ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
340     }
341     for (j=0; j<*nz; j++){
342       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
343     }
344     *v = (*nz) ? a->rowvalues : NULL;
345   }
346   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
347   PetscFunctionReturn(0);
348 }
349 
350 static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
351 {
352   PetscFunctionBegin;
353   PetscFunctionReturn(0);
354 }
355 
356 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
357 {
358   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
359   PetscErrorCode ierr;
360   PetscBool      flag;
361 
362   PetscFunctionBegin;
363   /* If the  matrix dimensions are not equal,or no of nonzeros */
364   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
365     flag = PETSC_FALSE;
366   }
367 
368   /* if the a->i are the same */
369   ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
370 
371   /* if a->j are the same */
372   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
373 
374   ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
375   PetscFunctionReturn(0);
376 }
377 
378 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
379 {
380   PetscInt   i;
381   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
382   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
383 
384   PetscFunctionBegin;
385   *m    = A->rmap->n;
386   *ia   = a->i;
387   *ja   = a->j;
388   *done = PETSC_TRUE;
389   if (oshift) {
390     for (i=0; i<(*ia)[*m]; i++) {
391       (*ja)[i]++;
392     }
393     for (i=0; i<=(*m); i++) (*ia)[i]++;
394   }
395   PetscFunctionReturn(0);
396 }
397 
398 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
399 {
400   PetscInt   i;
401   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
402   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
403 
404   PetscFunctionBegin;
405   if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
406   if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
407   if (oshift) {
408     if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
409     if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
410     for (i=0; i<=(*m); i++) (*ia)[i]--;
411     for (i=0; i<(*ia)[*m]; i++) {
412       (*ja)[i]--;
413     }
414   }
415   PetscFunctionReturn(0);
416 }
417 
418 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
419 {
420   Mat               B;
421   PetscErrorCode    ierr;
422   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
423   const PetscInt    *rj;
424   const PetscScalar *ra;
425   MPI_Comm          comm;
426 
427   PetscFunctionBegin;
428   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
429   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
430   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
431 
432   /* count the number of nonzeros per row */
433   for (i=0; i<m; i++) {
434     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
435     for (j=0; j<len; j++) {
436       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
437     }
438     nzeros += len;
439     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
440   }
441 
442   /* malloc space for nonzeros */
443   ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
444   ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
445   ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);
446 
447   nzeros = 0;
448   ia[0]  = 0;
449   for (i=0; i<m; i++) {
450     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
451     cnt  = 0;
452     for (j=0; j<len; j++) {
453       if (rj[j] != i+rstart) { /* if not diagonal */
454         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
455         ja[nzeros+cnt++] = rj[j];
456       }
457     }
458     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
459     nzeros += cnt;
460     ia[i+1] = nzeros;
461   }
462 
463   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
464   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
465   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
466   ierr = MatSetType(B,type);CHKERRQ(ierr);
467   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
468 
469   if (reuse == MAT_INPLACE_MATRIX) {
470     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
471   } else {
472     *newmat = B;
473   }
474   PetscFunctionReturn(0);
475 }
476 
477 /* -------------------------------------------------------------------*/
478 static struct _MatOps MatOps_Values = {0,
479                                        MatGetRow_MPIAdj,
480                                        MatRestoreRow_MPIAdj,
481                                        0,
482                                 /* 4*/ 0,
483                                        0,
484                                        0,
485                                        0,
486                                        0,
487                                        0,
488                                 /*10*/ 0,
489                                        0,
490                                        0,
491                                        0,
492                                        0,
493                                 /*15*/ 0,
494                                        MatEqual_MPIAdj,
495                                        0,
496                                        0,
497                                        0,
498                                 /*20*/ 0,
499                                        0,
500                                        MatSetOption_MPIAdj,
501                                        0,
502                                 /*24*/ 0,
503                                        0,
504                                        0,
505                                        0,
506                                        0,
507                                 /*29*/ 0,
508                                        0,
509                                        0,
510                                        0,
511                                        0,
512                                 /*34*/ 0,
513                                        0,
514                                        0,
515                                        0,
516                                        0,
517                                 /*39*/ 0,
518                                        MatCreateSubMatrices_MPIAdj,
519                                        0,
520                                        0,
521                                        0,
522                                 /*44*/ 0,
523                                        0,
524                                        MatShift_Basic,
525                                        0,
526                                        0,
527                                 /*49*/ 0,
528                                        MatGetRowIJ_MPIAdj,
529                                        MatRestoreRowIJ_MPIAdj,
530                                        0,
531                                        0,
532                                 /*54*/ 0,
533                                        0,
534                                        0,
535                                        0,
536                                        0,
537                                 /*59*/ 0,
538                                        MatDestroy_MPIAdj,
539                                        MatView_MPIAdj,
540                                        MatConvertFrom_MPIAdj,
541                                        0,
542                                 /*64*/ 0,
543                                        0,
544                                        0,
545                                        0,
546                                        0,
547                                 /*69*/ 0,
548                                        0,
549                                        0,
550                                        0,
551                                        0,
552                                 /*74*/ 0,
553                                        0,
554                                        0,
555                                        0,
556                                        0,
557                                 /*79*/ 0,
558                                        0,
559                                        0,
560                                        0,
561                                        0,
562                                 /*84*/ 0,
563                                        0,
564                                        0,
565                                        0,
566                                        0,
567                                 /*89*/ 0,
568                                        0,
569                                        0,
570                                        0,
571                                        0,
572                                 /*94*/ 0,
573                                        0,
574                                        0,
575                                        0,
576                                        0,
577                                 /*99*/ 0,
578                                        0,
579                                        0,
580                                        0,
581                                        0,
582                                /*104*/ 0,
583                                        0,
584                                        0,
585                                        0,
586                                        0,
587                                /*109*/ 0,
588                                        0,
589                                        0,
590                                        0,
591                                        0,
592                                /*114*/ 0,
593                                        0,
594                                        0,
595                                        0,
596                                        0,
597                                /*119*/ 0,
598                                        0,
599                                        0,
600                                        0,
601                                        0,
602                                /*124*/ 0,
603                                        0,
604                                        0,
605                                        0,
606                                        MatCreateSubMatricesMPI_MPIAdj,
607                                /*129*/ 0,
608                                        0,
609                                        0,
610                                        0,
611                                        0,
612                                /*134*/ 0,
613                                        0,
614                                        0,
615                                        0,
616                                        0,
617                                /*139*/ 0,
618                                        0,
619                                        0
620 };
621 
622 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
623 {
624   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
625   PetscErrorCode ierr;
626 #if defined(PETSC_USE_DEBUG)
627   PetscInt ii;
628 #endif
629 
630   PetscFunctionBegin;
631   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
632   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
633 
634 #if defined(PETSC_USE_DEBUG)
635   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
636   for (ii=1; ii<B->rmap->n; ii++) {
637     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
638   }
639   for (ii=0; ii<i[B->rmap->n]; ii++) {
640     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
641   }
642 #endif
643   B->preallocated = PETSC_TRUE;
644 
645   b->j      = j;
646   b->i      = i;
647   b->values = values;
648 
649   b->nz        = i[B->rmap->n];
650   b->diag      = 0;
651   b->symmetric = PETSC_FALSE;
652   b->freeaij   = PETSC_TRUE;
653 
654   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
655   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
656   PetscFunctionReturn(0);
657 }
658 
659 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
660 {
661   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
662   PetscErrorCode ierr;
663   const PetscInt *ranges;
664   MPI_Comm       acomm,bcomm;
665   MPI_Group      agroup,bgroup;
666   PetscMPIInt    i,rank,size,nranks,*ranks;
667 
668   PetscFunctionBegin;
669   *B    = NULL;
670   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
671   ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
672   ierr  = MPI_Comm_size(acomm,&rank);CHKERRQ(ierr);
673   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
674   for (i=0,nranks=0; i<size; i++) {
675     if (ranges[i+1] - ranges[i] > 0) nranks++;
676   }
677   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
678     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
679     *B   = A;
680     PetscFunctionReturn(0);
681   }
682 
683   ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
684   for (i=0,nranks=0; i<size; i++) {
685     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
686   }
687   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
688   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
689   ierr = PetscFree(ranks);CHKERRQ(ierr);
690   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
691   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
692   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
693   if (bcomm != MPI_COMM_NULL) {
694     PetscInt   m,N;
695     Mat_MPIAdj *b;
696     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
697     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
698     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
699     b          = (Mat_MPIAdj*)(*B)->data;
700     b->freeaij = PETSC_FALSE;
701     ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
702   }
703   PetscFunctionReturn(0);
704 }
705 
706 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
707 {
708   PetscErrorCode ierr;
709   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
710   PetscInt       *Values = NULL;
711   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
712   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
713 
714   PetscFunctionBegin;
715   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
716   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
717   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
718   nz   = adj->nz;
719   if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D not correct i[m] %d",nz,adj->i[m]);
720   ierr = MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
721 
722   ierr = PetscMPIIntCast(nz,&mnz);CHKERRQ(ierr);
723   ierr = PetscCalloc2(size,&allnz,size,&dispnz);CHKERRQ(ierr);
724   ierr = MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
725   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
726   if (adj->values) {
727     ierr = PetscMalloc1(NZ,&Values);CHKERRQ(ierr);
728     ierr = MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
729   }
730   ierr = PetscMalloc1(NZ,&J);CHKERRQ(ierr);
731   ierr = MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
732   ierr = PetscFree2(allnz,dispnz);CHKERRQ(ierr);
733   ierr = MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
734   nzstart -= nz;
735   /* shift the i[] values so they will be correct after being received */
736   for (i=0; i<m; i++) adj->i[i] += nzstart;
737   ierr = PetscMalloc1(M+1,&II);CHKERRQ(ierr);
738   ierr = PetscMPIIntCast(m,&mm);CHKERRQ(ierr);
739   ierr = PetscMalloc2(size,&allm,size,&dispm);CHKERRQ(ierr);
740   ierr = MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
741   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
742   ierr = MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
743   ierr = PetscFree2(allm,dispm);CHKERRQ(ierr);
744   II[M] = NZ;
745   /* shift the i[] values back */
746   for (i=0; i<m; i++) adj->i[i] -= nzstart;
747   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);CHKERRQ(ierr);
748   PetscFunctionReturn(0);
749 }
750 
751 /*@
752    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
753 
754    Collective
755 
756    Input Arguments:
757 .  A - original MPIAdj matrix
758 
759    Output Arguments:
760 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
761 
762    Level: developer
763 
764    Note:
765    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
766 
767    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
768 
769 .seealso: MatCreateMPIAdj()
770 @*/
771 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
772 {
773   PetscErrorCode ierr;
774 
775   PetscFunctionBegin;
776   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
777   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
778   PetscFunctionReturn(0);
779 }
780 
781 /*MC
782    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
783    intended for use constructing orderings and partitionings.
784 
785   Level: beginner
786 
787 .seealso: MatCreateMPIAdj
788 M*/
789 
790 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
791 {
792   Mat_MPIAdj     *b;
793   PetscErrorCode ierr;
794 
795   PetscFunctionBegin;
796   ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
797   B->data      = (void*)b;
798   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
799   B->assembled = PETSC_FALSE;
800 
801   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
802   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
803   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);CHKERRQ(ierr);
804   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
805   PetscFunctionReturn(0);
806 }
807 
808 /*@C
809    MatMPIAdjToSeq - Converts an parallel MPIAdj matrix to complete MPIAdj on each process (needed by sequential preconditioners)
810 
811    Logically Collective on MPI_Comm
812 
813    Input Parameter:
814 .  A - the matrix
815 
816    Output Parameter:
817 .  B - the same matrix on all processes
818 
819    Level: intermediate
820 
821 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
822 @*/
823 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
824 {
825   PetscErrorCode ierr;
826 
827   PetscFunctionBegin;
828   ierr = PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
829   PetscFunctionReturn(0);
830 }
831 
832 /*@C
833    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
834 
835    Logically Collective on MPI_Comm
836 
837    Input Parameters:
838 +  A - the matrix
839 .  i - the indices into j for the start of each row
840 .  j - the column indices for each row (sorted for each row).
841        The indices in i and j start with zero (NOT with one).
842 -  values - [optional] edge weights
843 
844    Level: intermediate
845 
846 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
847 @*/
848 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
849 {
850   PetscErrorCode ierr;
851 
852   PetscFunctionBegin;
853   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
854   PetscFunctionReturn(0);
855 }
856 
857 /*@C
858    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
859    The matrix does not have numerical values associated with it, but is
860    intended for ordering (to reduce bandwidth etc) and partitioning.
861 
862    Collective on MPI_Comm
863 
864    Input Parameters:
865 +  comm - MPI communicator
866 .  m - number of local rows
867 .  N - number of global columns
868 .  i - the indices into j for the start of each row
869 .  j - the column indices for each row (sorted for each row).
870        The indices in i and j start with zero (NOT with one).
871 -  values -[optional] edge weights
872 
873    Output Parameter:
874 .  A - the matrix
875 
876    Level: intermediate
877 
878    Notes:
879     This matrix object does not support most matrix operations, include
880    MatSetValues().
881    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
882    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
883     call from Fortran you need not create the arrays with PetscMalloc().
884    Should not include the matrix diagonals.
885 
886    If you already have a matrix, you can create its adjacency matrix by a call
887    to MatConvert, specifying a type of MATMPIADJ.
888 
889    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
890 
891 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
892 @*/
893 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
894 {
895   PetscErrorCode ierr;
896 
897   PetscFunctionBegin;
898   ierr = MatCreate(comm,A);CHKERRQ(ierr);
899   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
900   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
901   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
902   PetscFunctionReturn(0);
903 }
904 
905 
906 
907 
908 
909 
910 
911