#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap.n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap.rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap.n,mat->cmap.n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    ierr = PetscFree(a->values);CHKERRQ(ierr);
  }
  ierr = PetscFree(a);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_HERMITIAN:
    a->symmetric = PETSC_FALSE;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo(A,"Option ignored\n");CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}
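/*
   The Mat_MPIAdj structure stores the adjacency list in compressed-row form: a->i has
   A->rmap.n+1 entries, the global (0-based) column indices of local row r are
   a->j[a->i[r]] through a->j[a->i[r+1]-1], and a->i[A->rmap.n] equals a->nz.  As a
   small illustration (the values are chosen only as an example), a process owning two
   rows whose neighbors are {2} and {0,3} would hold

       a->i = {0, 1, 3}
       a->j = {2, 0, 3}

   The routines below traverse the arrays with exactly this convention.
*/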
/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,*diag,m = A->rmap.n;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap.n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap.rstart;

  if (row < 0 || row >= A->rmap.n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or the number of nonzeros differ, the matrices are not equal */
  if ((A->rmap.n != B->rmap.n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the a->i are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap.n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check that the a->j are the same */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap.n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
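/*
   A minimal usage sketch for the two routines above, assuming the caller goes through
   the public MatGetRowIJ()/MatRestoreRowIJ() interface that dispatches to them; the
   matrix "adj" and the choice oshift=1 (to obtain 1-based, Fortran-style indices) are
   illustrative only:

      PetscInt   n,*ia,*ja;
      PetscTruth done;

      ierr = MatGetRowIJ(adj,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        ... use ia[0..n] and ja[], now shifted to 1-based indexing ...
      }
      ierr = MatRestoreRowIJ(adj,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);

   Note that the shift is applied in place to a->i and a->j, which is why
   MatRestoreRowIJ() must be called to undo it before the matrix is used again.
*/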
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap.n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap.n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap.N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap.n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor    = 0;
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  ierr = PetscMapInitialize(B->comm,&B->rmap);CHKERRQ(ierr);
  ierr = PetscMapInitialize(B->comm,&B->cmap);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                           "MatMPIAdjSetPreallocation_MPIAdj",
                                           MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted within each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
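/*
   A minimal usage sketch for MatMPIAdjSetPreallocation(); the communicator, the sizes,
   and the two-row example graph are illustrative assumptions, not part of the library
   (a single-process run describing rows with neighbors {1,3} and {0,2} of a
   four-vertex graph):

      Mat      adj;
      PetscInt *ii,*jj;

      ierr  = PetscMalloc(3*sizeof(PetscInt),&ii);CHKERRQ(ierr);
      ierr  = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);
      ii[0] = 0; ii[1] = 2; ii[2] = 4;
      jj[0] = 1; jj[1] = 3; jj[2] = 0; jj[3] = 2;
      ierr  = MatCreate(PETSC_COMM_WORLD,&adj);CHKERRQ(ierr);
      ierr  = MatSetSizes(adj,2,4,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr  = MatSetType(adj,MATMPIADJ);CHKERRQ(ierr);
      ierr  = MatMPIAdjSetPreallocation(adj,ii,jj,PETSC_NULL);CHKERRQ(ierr);

   Because MatMPIAdjSetPreallocation_MPIAdj() sets freeaij, the ii and jj arrays are
   taken over by the matrix and freed in MatDestroy_MPIAdj(); they must be allocated
   with PetscMalloc() and must not be freed by the caller.
*/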
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted within each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,N,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
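/*
   A minimal sketch of the conversion path implemented by MatConvertTo_MPIAdj() above,
   used here to drive a partitioner; the existing matrix A and the MatPartitioning
   calls are illustrative assumptions, not part of this file:

      Mat             adj;
      MatPartitioning part;
      IS              is;

      ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
      ierr = ISDestroy(is);CHKERRQ(ierr);
      ierr = MatDestroy(adj);CHKERRQ(ierr);
*/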