/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
#include <petscsf.h>

/*
   The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt       nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt       nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt       *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt *irows_indices,*icols_indices,*xadj,*adjncy;
  PetscMPIInt    owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj*)adj->data;
  PetscLayout    rmap;
  MPI_Comm       comm;
  PetscSF        sf;
  PetscSFNode    *iremote;
  PetscBool      done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct sf graph */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
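  /*
     Second communication phase: now that the number of columns of every requested row is known,
     build one SF leaf per nonzero of the requested rows and fetch the column indices (and the
     edge weights, when the matrix carries them) from the owning processes.
  */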
  /* receive buffer for the edge weights, when present */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* drop the columns that are not in icols and count the entries that survive */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  /* compress the retained entries into the output CSR arrays */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   { *sadj_xadj = sxadj;     } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy; } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
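
/*
  The CSR arrays produced by MatCreateSubMatrix_MPIAdj_data() still hold column entries in the
  global numbering of the original matrix; the driver below renumbers them against the sorted,
  deduplicated union of the row and column index sets before the sub-matrix is created.
*/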

static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt       i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt       *indices,nindx,j,k,loc;
  PetscMPIInt    issame;
  const PetscInt *irow_indices,*icol_indices;
  MPI_Comm       scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx > (irow_n+icol_n) ? nindx : (irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  for (i=0; i<n; i++) {
    if (subcomm) {
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as the comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj = NULL; sadjncy = NULL; svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
      Mat        sadj = *(submat[i]);
      Mat_MPIAdj *sa  = (Mat_MPIAdj*)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the row index set");
      PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
      PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      if (svalues) PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  /* get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
  PetscFunctionReturn(0);
}
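
/*
  The two wrappers above are reached through the public MatCreateSubMatrices()/MatCreateSubMatricesMPI()
  interface. A minimal sketch of extracting one sequential sub-matrix from an assembled MATMPIADJ
  matrix adj; the index sets and the variables nlocal and first are hypothetical and would be chosen
  by the caller:

     IS  rows,cols;
     Mat *submats;

     PetscCall(ISCreateStride(PETSC_COMM_SELF,nlocal,first,1,&rows));
     PetscCall(ISCreateStride(PETSC_COMM_SELF,nlocal,first,1,&cols));
     PetscCall(MatCreateSubMatrices(adj,1,&rows,&cols,MAT_INITIAL_MATRIX,&submats));

  MatCreateSubMatricesMPI() is the parallel variant: the index sets then live on a (sub)communicator
  rather than on PETSC_COMM_SELF and the resulting sub-matrices are distributed over that communicator.
*/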

static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A,&name));
  PetscCall(PetscViewerGetFormat(viewer,&format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
    PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i=0; i<m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j],a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  if (iascii) {
    PetscCall(MatView_MPIAdj_ASCII(A,viewer));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
    break;
  }
  PetscFunctionReturn(0);
}
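
/*
  MatGetRow() support: an ADJ matrix stores no numerical values unless edge weights were supplied,
  so the returned row values are synthesized as 1.0 when a->values is NULL; a->rowvalues is a
  scratch buffer that is grown geometrically as longer rows are requested.
*/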

static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2,*nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
    }
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscBool  flag;

  PetscFunctionBegin;
  /* the matrices cannot be equal if the local dimensions or the number of nonzeros differ */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check whether the row offsets a->i match, then whether the column indices a->j match */
    PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
    if (flag) PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
  }
  PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is used you must pass in the inia[] argument");
    PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is used you must pass in the inja[] argument");
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
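
/*
  Conversion from another matrix type: diagonal entries are dropped (the adjacency graph stores no
  self edges) and the absolute value of each remaining entry is kept as an integer edge weight.
*/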

PetscErrorCode MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
  }

  /* malloc space for nonzeros */
  PetscCall(PetscMalloc1(nzeros+1,&a));
  PetscCall(PetscMalloc1(N+1,&ia));
  PetscCall(PetscMalloc1(nzeros+1,&ja));

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
    cnt = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
  PetscCall(MatCreate(comm,&B));
  PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(B,type));
  PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A,&B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {NULL, MatGetRow_MPIAdj, MatRestoreRow_MPIAdj, NULL,
                                /* 4*/ NULL, NULL, NULL, NULL, NULL, NULL,
                                /*10*/ NULL, NULL, NULL, NULL, NULL,
                                /*15*/ NULL, MatEqual_MPIAdj, NULL, NULL, NULL,
                                /*20*/ NULL, NULL, MatSetOption_MPIAdj, NULL,
                                /*24*/ NULL, NULL, NULL, NULL, NULL,
                                /*29*/ NULL, NULL, NULL, NULL, NULL,
                                /*34*/ NULL, NULL, NULL, NULL, NULL,
                                /*39*/ NULL, MatCreateSubMatrices_MPIAdj, NULL, NULL, NULL,
                                /*44*/ NULL, NULL, MatShift_Basic, NULL, NULL,
                                /*49*/ NULL, MatGetRowIJ_MPIAdj, MatRestoreRowIJ_MPIAdj, NULL, NULL,
                                /*54*/ NULL, NULL, NULL, NULL, NULL,
                                /*59*/ NULL, MatDestroy_MPIAdj, MatView_MPIAdj, MatConvertFrom_MPIAdj, NULL,
                                /*64*/ NULL, NULL, NULL, NULL, NULL,
                                /*69*/ NULL, NULL, NULL, NULL, NULL,
                                /*74*/ NULL, NULL, NULL, NULL, NULL,
                                /*79*/ NULL, NULL, NULL, NULL, NULL,
                                /*84*/ NULL, NULL, NULL, NULL, NULL,
                                /*89*/ NULL, NULL, NULL, NULL, NULL,
                                /*94*/ NULL, NULL, NULL, NULL, NULL,
                                /*99*/ NULL, NULL, NULL, NULL, NULL,
                               /*104*/ NULL, NULL, NULL, NULL, NULL,
                               /*109*/ NULL, NULL, NULL, NULL, NULL,
                               /*114*/ NULL, NULL, NULL, NULL, NULL,
                               /*119*/ NULL, NULL, NULL, NULL, NULL,
                               /*124*/ NULL, NULL, NULL, NULL, MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL, NULL, NULL, NULL, NULL,
                               /*134*/ NULL, NULL, NULL, NULL, NULL,
                               /*139*/ NULL, NULL, NULL, NULL, NULL,
                               /*144*/ NULL, NULL, NULL, NULL};
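
/*
  The preallocation routine below takes ownership of the caller-provided i, j and values arrays
  (freeaij is set, so MatDestroy() frees them) and, in debug builds, checks that i[] is
  nondecreasing and that every column index is within the global column range.
*/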

static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj*)B->data;
  PetscBool  useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* make sure every process knows whether edge weights are being used */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
    }
  }
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
  PetscCallMPI(MPI_Comm_size(acomm,&size));
  PetscCallMPI(MPI_Comm_rank(acomm,&rank));
  PetscCall(MatGetOwnershipRanges(A,&ranges));
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) { /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscCall(PetscObjectReference((PetscObject)A));
    *B = A;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscMalloc1(nranks,&ranks));
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  PetscCallMPI(MPI_Comm_group(acomm,&agroup));
  PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
  PetscCall(PetscFree(ranks));
  PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
  PetscCallMPI(MPI_Group_free(&agroup));
  PetscCallMPI(MPI_Group_free(&bgroup));
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    PetscCall(MatGetLocalSize(A,&m,NULL));
    PetscCall(MatGetSize(A,NULL,&N));
    PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    PetscCallMPI(MPI_Comm_free(&bcomm));
  }
  PetscFunctionReturn(0);
}
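
/*
  Gather the whole distributed graph onto every process: j[] (and the optional values) are
  concatenated with MPI_Allgatherv(), while the local i[] offsets are temporarily shifted by the
  process's starting nonzero count so that the gathered row pointers are globally consistent.
*/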

PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt    M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt    *Values = NULL;
  Mat_MPIAdj  *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz,&mnz));
  PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+allnz[i-1];
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ,&Values));
    PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ,&J));
  PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz,dispnz));
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M+1,&II));
  PetscCall(PetscMPIIntCast(m,&mm));
  PetscCall(PetscMalloc2(size,&allm,size,&dispm));
  PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+allm[i-1];
  PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm,dispm));
  II[M] = NZ;
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}

/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Parameter:
.  A - original MPIAdj matrix

   Output Parameter:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

   Level: beginner

.seealso: `MatCreateMPIAdj`
M*/
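
/*
  As the manual page of MatCreateMPIAdj() below notes, an adjacency matrix can also be obtained
  from an existing matrix. A minimal sketch, assuming a previously assembled matrix A:

     Mat adj;

     PetscCall(MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj));
     ...
     PetscCall(MatDestroy(&adj));

  The conversion is handled by MatConvertFrom_MPIAdj() above, so diagonal entries are removed and
  the magnitudes of the remaining entries become the edge weights.
*/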

PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data = (void*)b;
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  B->assembled = PETSC_FALSE;

  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}

/*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential preconditioners)

   Logically Collective

   Input Parameter:
.  A - the matrix

   Output Parameter:
.  B - the same matrix on all processes

   Level: intermediate

.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
@*/
PetscErrorCode MatMPIAdjToSeq(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}

/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscFunctionBegin;
  PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
  PetscFunctionReturn(0);
}

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   This matrix object does not support most matrix operations, including
   MatSetValues().

   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().

   The rows should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
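
/*
  A minimal usage sketch for MatCreateMPIAdj(): each process supplies the CSR description of its
  local rows, with the arrays obtained from PetscMalloc() because the matrix takes ownership of
  them. The tiny graph below (2 local rows, 4 global columns, 2 neighbours per row, shown for the
  process owning global rows 0 and 1) is purely illustrative.

     Mat      adj;
     PetscInt *ia,*ja;

     PetscCall(PetscMalloc1(3,&ia));
     PetscCall(PetscMalloc1(4,&ja));
     ia[0] = 0; ia[1] = 2; ia[2] = 4;              row pointers for the 2 local rows
     ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2;   sorted global column indices, no diagonal entries
     PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ia,ja,NULL,&adj));
     ...
     PetscCall(MatDestroy(&adj));                  also frees ia and ja
*/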