#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_HERMITIAN:
    a->symmetric = PETSC_FALSE;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscLogInfo((A,"MatSetOption_MPIAdj:Option ignored\n"));CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}
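/*
   Illustrative sketch (not compiled as part of this file): how a caller would
   mark an MPIAdj matrix as structurally symmetric so that orderings and
   partitionings can exploit the symmetry.  This assumes the two-argument
   MatSetOption() of this PETSc release; any option other than the symmetry
   ones simply falls through to the "Option ignored" branch above.

     Mat A;
     ... create A with type MATMPIADJ ...
     ierr = MatSetOption(A,MAT_STRUCTURALLY_SYMMETRIC);CHKERRQ(ierr);
*/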
/*
     Adds diagonal pointers to sparse matrix structure.
*/
#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,*diag,m = A->m;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or number of nonzeros differ, the matrices cannot be equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointers ... */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      /* ... and, only if those match, the column indices */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }

  /* every process must agree for the matrices to be equal */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
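/*
   Illustrative sketch (not compiled here): retrieving the compressed row
   structure of a MATMPIADJ matrix with 1-based indices, as a Fortran-style
   ordering package would expect.  On more than one process MatGetRowIJ()
   above sets done = PETSC_FALSE and returns no arrays.

     PetscInt   n,*ia,*ja;
     PetscTruth done;
     ierr = MatGetRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
     if (done) {
       ... use ia[0..n] and ja[0..ia[n]-1-1], both shifted by one ...
       ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
     }
*/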
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D at position %D is out of range\n",j[ii],ii);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->m];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
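/*
   Illustrative sketch (not compiled here) of the CSR layout that
   MatMPIAdjSetPreallocation_MPIAdj() above validates when PETSC_USE_DEBUG is
   defined: i[] holds the row offsets for the local rows (i[0] must be 0 and
   the entries nondecreasing) and j[] holds 0-based global column indices with
   the diagonal omitted.  For three local rows whose neighbors are {1,3},
   {0,2,3} and {1}, and no edge weights:

     PetscInt *i,*j;
     ierr = PetscMalloc(4*sizeof(PetscInt),&i);CHKERRQ(ierr);
     ierr = PetscMalloc(6*sizeof(PetscInt),&j);CHKERRQ(ierr);
     i[0] = 0; i[1] = 2; i[2] = 5; i[3] = 6;
     j[0] = 1; j[1] = 3;   j[2] = 0; j[3] = 2; j[4] = 3;   j[5] = 1;
     ierr = MatMPIAdjSetPreallocation(B,i,j,PETSC_NULL);CHKERRQ(ierr);

   The arrays now belong to the matrix and are freed when it is destroyed.
*/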
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  PetscInt       ii;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr    = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data = (void*)b;
  ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor    = 0;
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  ierr = PetscSplitOwnership(B->comm,&B->m,&B->M);CHKERRQ(ierr);
  B->n = B->N = PetscMax(B->N,B->n);

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&b->rowners);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(B,(size+2)*sizeof(PetscInt)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                           "MatMPIAdjSetPreallocation_MPIAdj",
                                           MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
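/*
   Illustrative sketch (not compiled here): the three-step sequence that
   MatCreateMPIAdj() below packages up, written against the six-argument
   MatCreate() of this PETSc release.  Here m is the number of local rows,
   n the global number of columns, and i, j, values are as described above.

     Mat B;
     ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,&B);CHKERRQ(ierr);
     ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(B,i,j,values);CHKERRQ(ierr);
*/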
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
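/*
   Illustrative sketch (not compiled here): obtaining the adjacency graph of
   an already assembled matrix for use with a partitioner, by converting it to
   MATMPIADJ as suggested in the MatCreateMPIAdj() notes above.  The
   MatPartitioning calls shown are assumed to have their usual signatures in
   this PETSc release; consult the MatPartitioning manual pages.

     Mat             adj;
     MatPartitioning part;
     IS              is;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
*/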