/*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_HERMITIAN:
    a->symmetric = PETSC_FALSE;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  /* note: rows that contain no stored diagonal entry leave diag[i] unset */
  for (i=0; i<A->m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int        ierr;
  PetscTruth flag;

  PetscFunctionBegin;
  /* if the local dimensions or number of nonzeros differ, the matrices cannot be equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check whether the row offsets a->i are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check whether the column indices a->j are the same */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
/*85*/ 0
};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index j[%d]=%d is out of range\n",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->m];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

   Level: beginner

.seealso: MatCreateMPIAdj
M*/
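
/*
   Illustrative sketch (not part of the original source): creating a MATMPIADJ matrix
   with MatCreateMPIAdj() for a hypothetical 4-vertex graph on a single process. As
   documented below for MatCreateMPIAdj(), the i/j arrays are 0-based, contain no
   diagonal entries, and must be allocated with PetscMalloc() because they are freed
   when the matrix is destroyed.

      Mat A;
      int ierr,*ii,*jj;

      ierr = PetscMalloc(5*sizeof(int),&ii);CHKERRQ(ierr);      row offsets, length m+1
      ierr = PetscMalloc(6*sizeof(int),&jj);CHKERRQ(ierr);      column indices, length ii[m]
      ii[0] = 0; ii[1] = 2; ii[2] = 3; ii[3] = 5; ii[4] = 6;
      jj[0] = 1; jj[1] = 3;                                     neighbors of vertex 0
      jj[2] = 2;                                                neighbors of vertex 1
      jj[3] = 0; jj[4] = 1;                                     neighbors of vertex 2
      jj[5] = 2;                                                neighbors of vertex 3
      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&A);CHKERRQ(ierr);
        ... use A for ordering or partitioning ...
      ierr = MatDestroy(A);CHKERRQ(ierr);                       also frees ii and jj
*/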

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr    = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data = (void*)b;
  ierr    = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                           "MatMPIAdjSetPreallocation_MPIAdj",
                                           MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  int ierr,(*f)(Mat,int*,int*,int*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call this routine from Fortran you need not allocate the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
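
/*
   Illustrative sketch (not part of the original source): the same matrix can also be
   built in two steps, mirroring what MatCreateMPIAdj() does internally. Here comm, m,
   n, ii, and jj are hypothetical and follow the same rules as the MatCreateMPIAdj()
   arguments above (0-based indices, no diagonal entries, arrays from PetscMalloc()).

      Mat B;
      int ierr;

      ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,&B);CHKERRQ(ierr);
      ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
      ierr = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);
*/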

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
{
  Mat         B;
  int         i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  PetscScalar *ra;
  MPI_Comm    comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,&B);CHKERRQ(ierr);

  /* Fake support for "inplace" convert. */
  if (*newmat == A) {
    ierr = MatDestroy(A);CHKERRQ(ierr);
  }
  *newmat = B;
  PetscFunctionReturn(0);
}
EXTERN_C_END
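
/*
   Illustrative sketch (not part of the original source): obtaining the adjacency
   structure of an existing matrix X through MatConvert(), as mentioned in the
   MatCreateMPIAdj() notes above. The matrix X is hypothetical, and the argument
   list shown assumes the MatConvert() calling sequence of this PETSc version.

      Mat X,adj;
      int ierr;
        ... assemble X (e.g. an MPIAIJ matrix) ...
      ierr = MatConvert(X,MATMPIADJ,&adj);CHKERRQ(ierr);
        ... use adj for ordering or partitioning ...
      ierr = MatDestroy(adj);CHKERRQ(ierr);
*/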