#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap.n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap.rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap.n,mat->cmap.n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    ierr = PetscFree(a->values);CHKERRQ(ierr);
  }
  ierr = PetscFree(a);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscTruth flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to the sparse matrix structure.
*/
#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap.n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  /* note: adjacency rows normally omit the diagonal, in which case a->diag[i] is left unset */
  for (i=0; i<A->rmap.n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap.rstart;

  if (row < 0 || row >= A->rmap.n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or numbers of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap.n != B->rmap.n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointers a->i ... */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap.n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    /* ... and, only if those match, the column indices a->j */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }

  /* all processes must agree on the local results */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap.n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift the indices in place from 0-based to 1-based; the column indices must be
       shifted first, while (*ia)[*m] is still the nonzero count */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* undo the 1-based shift; the row pointers must be restored first so that
       (*ia)[*m] is again the nonzero count */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count the diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not the diagonal */
        a[nzeros+cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,N,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap.n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap.n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap.N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D at position %D is out of range\n",j[ii],ii);
    }
  }
#endif

  /* the matrix takes ownership of the arrays; they are freed in MatDestroy_MPIAdj() */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap.n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

   Level: beginner

.seealso: MatCreateMPIAdj()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor    = 0;
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  ierr = PetscMapSetBlockSize(&B->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize(&B->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp(&B->rmap);CHKERRQ(ierr);
  /* every process addresses the full global column space */
  if (B->cmap.n < 0) B->cmap.n = B->cmap.N;
  if (B->cmap.N < 0) B->cmap.N = B->cmap.n;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                           "MatMPIAdjSetPreallocation_MPIAdj",
                                           MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
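
/*
   A minimal sketch of the (i,j) layout that MatMPIAdjSetPreallocation() expects,
   assuming a single process that owns all three rows of the graph 0-1, 1-2,
   stored symmetrically with the diagonal omitted (the variable names ii and jj
   are hypothetical, not part of the API):

      PetscInt *ii,*jj;

      ierr = PetscMalloc(4*sizeof(PetscInt),&ii);CHKERRQ(ierr);
      ierr = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);
      ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4;  (row starts; ii[3] is the total number of edges)
      jj[0] = 1;             (row 0 is adjacent to vertex 1)
      jj[1] = 0; jj[2] = 2;  (row 1 is adjacent to vertices 0 and 2)
      jj[3] = 1;             (row 2 is adjacent to vertex 1)
      ierr  = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);

   The arrays must come from PetscMalloc() (in C) because the matrix takes
   ownership of them and frees them when it is destroyed.
*/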

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of (global) columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The i and j arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,n);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
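
/*
   A usage sketch of the intended workflow (variable names are hypothetical):
   convert an existing assembled matrix C into its adjacency structure via
   MatConvert(), which dispatches to MatConvertFrom_MPIAdj() above, then hand
   the result to a partitioner:

      Mat             adj;
      MatPartitioning part;
      IS              is;

      ierr = MatConvert(C,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      (... use the index set, e.g. to redistribute C ...)
      ierr = ISDestroy(is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
      ierr = MatDestroy(adj);CHKERRQ(ierr);
*/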