1 2 /* 3 Creates a matrix class for using the Neumann-Neumann type preconditioners. 4 This stores the matrices in globally unassembled form. Each processor 5 assembles only its local Neumann problem and the parallel matrix vector 6 product is handled "implicitly". 7 8 We provide: 9 MatMult() 10 11 Currently this allows for only one subdomain per processor. 12 13 */ 14 15 #include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/ 16 17 static PetscErrorCode MatISComputeSF_Private(Mat); 18 19 #undef __FUNCT__ 20 #define __FUNCT__ "MatISComputeSF_Private" 21 static PetscErrorCode MatISComputeSF_Private(Mat B) 22 { 23 Mat_IS *matis = (Mat_IS*)(B->data); 24 const PetscInt *gidxs; 25 PetscErrorCode ierr; 26 27 PetscFunctionBegin; 28 ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr); 29 ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr); 30 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr); 31 ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 32 /* PETSC_OWN_POINTER refers to ilocal which is NULL */ 33 ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 34 ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 35 ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr); 36 PetscFunctionReturn(0); 37 } 38 39 #undef __FUNCT__ 40 #define __FUNCT__ "MatISSetPreallocation" 41 /*@ 42 MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix. 43 44 Collective on MPI_Comm 45 46 Input Parameters: 47 + B - the matrix 48 . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 49 (same value is used for all local rows) 50 . 
d_nnz - array containing the number of nonzeros in the various rows of the 51 DIAGONAL portion of the local submatrix (possibly different for each row) 52 or NULL, if d_nz is used to specify the nonzero structure. 53 The size of this array is equal to the number of local rows, i.e 'm'. 54 For matrices that will be factored, you must leave room for (and set) 55 the diagonal entry even if it is zero. 56 . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 57 submatrix (same value is used for all local rows). 58 - o_nnz - array containing the number of nonzeros in the various rows of the 59 OFF-DIAGONAL portion of the local submatrix (possibly different for 60 each row) or NULL, if o_nz is used to specify the nonzero 61 structure. The size of this array is equal to the number 62 of local rows, i.e 'm'. 63 64 If the *_nnz parameter is given then the *_nz parameter is ignored 65 66 Level: intermediate 67 68 Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition 69 from the asssembled format to the unassembled one. It overestimates the preallocation of MATIS local 70 matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects. 
71 72 .keywords: matrix 73 74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat() 75 @*/ 76 PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 77 { 78 PetscErrorCode ierr; 79 80 PetscFunctionBegin; 81 PetscValidHeaderSpecific(B,MAT_CLASSID,1); 82 PetscValidType(B,1); 83 ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 84 PetscFunctionReturn(0); 85 } 86 87 #undef __FUNCT__ 88 #define __FUNCT__ "MatISSetPreallocation_IS" 89 PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 90 { 91 Mat_IS *matis = (Mat_IS*)(B->data); 92 PetscInt bs,i,nlocalcols; 93 PetscErrorCode ierr; 94 95 PetscFunctionBegin; 96 if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping"); 97 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 98 ierr = MatISComputeSF_Private(B);CHKERRQ(ierr); 99 } 100 if (!d_nnz) { 101 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz; 102 } else { 103 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i]; 104 } 105 if (!o_nnz) { 106 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz; 107 } else { 108 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i]; 109 } 110 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 111 ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr); 112 ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr); 113 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 114 for (i=0;i<matis->sf_nleaves;i++) { 115 matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols); 116 } 117 ierr = 
MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr); 118 for (i=0;i<matis->sf_nleaves/bs;i++) { 119 matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs; 120 } 121 ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 122 for (i=0;i<matis->sf_nleaves/bs;i++) { 123 matis->sf_leafdata[i] = matis->sf_leafdata[i]-i; 124 } 125 ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 126 PetscFunctionReturn(0); 127 } 128 129 #undef __FUNCT__ 130 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private" 131 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce) 132 { 133 Mat_IS *matis = (Mat_IS*)(A->data); 134 PetscInt *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership; 135 const PetscInt *global_indices_r,*global_indices_c; 136 PetscInt i,j,bs,rows,cols; 137 PetscInt lrows,lcols; 138 PetscInt local_rows,local_cols; 139 PetscMPIInt nsubdomains; 140 PetscBool isdense,issbaij; 141 PetscErrorCode ierr; 142 143 PetscFunctionBegin; 144 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr); 145 ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr); 146 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 147 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 148 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 149 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 150 ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 151 if (A->rmap->mapping != A->cmap->mapping) { 152 ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 153 } else { 154 global_indices_c = global_indices_r; 155 } 156 157 if (issbaij) { 158 ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr); 159 } 160 /* 161 An SF reduce is needed to sum up properly on shared rows. 
162 Note that generally preallocation is not exact, since it overestimates nonzeros 163 */ 164 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 165 ierr = MatISComputeSF_Private(A);CHKERRQ(ierr); 166 } 167 ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr); 168 ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr); 169 /* All processes need to compute entire row ownership */ 170 ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr); 171 ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 172 for (i=0;i<nsubdomains;i++) { 173 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 174 row_ownership[j] = i; 175 } 176 } 177 ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 178 179 /* 180 my_dnz and my_onz contains exact contribution to preallocation from each local mat 181 then, they will be summed up properly. This way, preallocation is always sufficient 182 */ 183 ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr); 184 /* preallocation as a MATAIJ */ 185 if (isdense) { /* special case for dense local matrices */ 186 for (i=0;i<local_rows;i++) { 187 PetscInt owner = row_ownership[global_indices_r[i]]; 188 for (j=0;j<local_cols;j++) { 189 PetscInt index_col = global_indices_c[j]; 190 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 191 my_dnz[i] += 1; 192 } else { /* offdiag block */ 193 my_onz[i] += 1; 194 } 195 } 196 } 197 } else { /* TODO: this could be optimized using MatGetRowIJ */ 198 for (i=0;i<local_rows;i++) { 199 const PetscInt *cols; 200 PetscInt ncols,index_row = global_indices_r[i]; 201 ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 202 for (j=0;j<ncols;j++) { 203 PetscInt owner = row_ownership[index_row]; 204 PetscInt index_col = global_indices_c[cols[j]]; 205 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag 
block */ 206 my_dnz[i] += 1; 207 } else { /* offdiag block */ 208 my_onz[i] += 1; 209 } 210 /* same as before, interchanging rows and cols */ 211 if (issbaij && index_col != index_row) { 212 owner = row_ownership[index_col]; 213 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 214 my_dnz[cols[j]] += 1; 215 } else { 216 my_onz[cols[j]] += 1; 217 } 218 } 219 } 220 ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 221 } 222 } 223 if (global_indices_c != global_indices_r) { 224 ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 225 } 226 ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 227 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 228 229 /* Reduce my_dnz and my_onz */ 230 if (maxreduce) { 231 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 232 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 233 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 234 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 235 } else { 236 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 237 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 238 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 239 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 240 } 241 ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr); 242 243 /* Resize preallocation if overestimated */ 244 for (i=0;i<lrows;i++) { 245 dnz[i] = PetscMin(dnz[i],lcols); 246 onz[i] = PetscMin(onz[i],cols-lcols); 247 } 248 /* set preallocation */ 249 ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr); 250 for (i=0;i<lrows/bs;i++) { 251 dnz[i] = dnz[i*bs]/bs; 252 onz[i] = onz[i*bs]/bs; 253 } 254 ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 255 ierr = 
MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 256 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 257 if (issbaij) { 258 ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr); 259 } 260 PetscFunctionReturn(0); 261 } 262 263 #undef __FUNCT__ 264 #define __FUNCT__ "MatISGetMPIXAIJ_IS" 265 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M) 266 { 267 Mat_IS *matis = (Mat_IS*)(mat->data); 268 Mat local_mat; 269 /* info on mat */ 270 PetscInt bs,rows,cols,lrows,lcols; 271 PetscInt local_rows,local_cols; 272 PetscBool isdense,issbaij,isseqaij; 273 PetscMPIInt nsubdomains; 274 /* values insertion */ 275 PetscScalar *array; 276 /* work */ 277 PetscErrorCode ierr; 278 279 PetscFunctionBegin; 280 /* get info from mat */ 281 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr); 282 if (nsubdomains == 1) { 283 if (reuse == MAT_INITIAL_MATRIX) { 284 ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr); 285 } else { 286 ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 287 } 288 PetscFunctionReturn(0); 289 } 290 ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr); 291 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 292 ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr); 293 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 294 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 295 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 296 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 297 298 if (reuse == MAT_INITIAL_MATRIX) { 299 MatType new_mat_type; 300 PetscBool issbaij_red; 301 302 /* determining new matrix type */ 303 ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 304 if (issbaij_red) { 305 new_mat_type = MATSBAIJ; 306 } else { 307 if (bs>1) { 308 new_mat_type = MATBAIJ; 309 } 
else { 310 new_mat_type = MATAIJ; 311 } 312 } 313 314 ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr); 315 ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr); 316 ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr); 317 ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr); 318 ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr); 319 } else { 320 PetscInt mbs,mrows,mcols,mlrows,mlcols; 321 /* some checks */ 322 ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 323 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 324 ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr); 325 if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows); 326 if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 327 if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows); 328 if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols); 329 if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. 
Wrong block size (%d != %d)",bs,mbs); 330 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 331 } 332 333 if (issbaij) { 334 ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr); 335 } else { 336 ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr); 337 local_mat = matis->A; 338 } 339 340 /* Set values */ 341 ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr); 342 if (isdense) { /* special case for dense local matrices */ 343 PetscInt i,*dummy_rows,*dummy_cols; 344 345 if (local_rows != local_cols) { 346 ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr); 347 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 348 for (i=0;i<local_cols;i++) dummy_cols[i] = i; 349 } else { 350 ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr); 351 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 352 dummy_cols = dummy_rows; 353 } 354 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 355 ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr); 356 ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr); 357 ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr); 358 if (dummy_rows != dummy_cols) { 359 ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr); 360 } else { 361 ierr = PetscFree(dummy_rows);CHKERRQ(ierr); 362 } 363 } else if (isseqaij) { 364 PetscInt i,nvtxs,*xadj,*adjncy; 365 PetscBool done; 366 367 ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 368 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__); 369 ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr); 370 for (i=0;i<nvtxs;i++) { 371 ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr); 372 } 373 ierr = 
MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 374 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__); 375 ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr); 376 } else { /* very basic values insertion for all other matrix types */ 377 PetscInt i; 378 379 for (i=0;i<local_rows;i++) { 380 PetscInt j; 381 const PetscInt *local_indices_cols; 382 383 ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 384 ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr); 385 ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 386 } 387 } 388 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 389 ierr = MatDestroy(&local_mat);CHKERRQ(ierr); 390 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 391 if (isdense) { 392 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr); 393 } 394 PetscFunctionReturn(0); 395 } 396 397 #undef __FUNCT__ 398 #define __FUNCT__ "MatISGetMPIXAIJ" 399 /*@ 400 MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format 401 402 Input Parameter: 403 . mat - the matrix (should be of type MATIS) 404 . reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 405 406 Output Parameter: 407 . newmat - the matrix in AIJ format 408 409 Level: developer 410 411 Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested. 
412 413 .seealso: MATIS 414 @*/ 415 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat) 416 { 417 PetscErrorCode ierr; 418 419 PetscFunctionBegin; 420 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 421 PetscValidLogicalCollectiveEnum(mat,reuse,2); 422 PetscValidPointer(newmat,3); 423 if (reuse != MAT_INITIAL_MATRIX) { 424 PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3); 425 PetscCheckSameComm(mat,1,*newmat,3); 426 if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix"); 427 } 428 ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr); 429 PetscFunctionReturn(0); 430 } 431 432 #undef __FUNCT__ 433 #define __FUNCT__ "MatDuplicate_IS" 434 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat) 435 { 436 PetscErrorCode ierr; 437 Mat_IS *matis = (Mat_IS*)(mat->data); 438 PetscInt bs,m,n,M,N; 439 Mat B,localmat; 440 441 PetscFunctionBegin; 442 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 443 ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr); 444 ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr); 445 ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr); 446 ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr); 447 ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr); 448 ierr = MatDestroy(&localmat);CHKERRQ(ierr); 449 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 450 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 451 *newmat = B; 452 PetscFunctionReturn(0); 453 } 454 455 #undef __FUNCT__ 456 #define __FUNCT__ "MatIsHermitian_IS" 457 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg) 458 { 459 PetscErrorCode ierr; 460 Mat_IS *matis = (Mat_IS*)A->data; 461 PetscBool local_sym; 462 463 PetscFunctionBegin; 464 ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr); 465 ierr = 
MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 466 PetscFunctionReturn(0); 467 } 468 469 #undef __FUNCT__ 470 #define __FUNCT__ "MatIsSymmetric_IS" 471 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg) 472 { 473 PetscErrorCode ierr; 474 Mat_IS *matis = (Mat_IS*)A->data; 475 PetscBool local_sym; 476 477 PetscFunctionBegin; 478 ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr); 479 ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 480 PetscFunctionReturn(0); 481 } 482 483 #undef __FUNCT__ 484 #define __FUNCT__ "MatDestroy_IS" 485 PetscErrorCode MatDestroy_IS(Mat A) 486 { 487 PetscErrorCode ierr; 488 Mat_IS *b = (Mat_IS*)A->data; 489 490 PetscFunctionBegin; 491 ierr = MatDestroy(&b->A);CHKERRQ(ierr); 492 ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr); 493 ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr); 494 ierr = VecDestroy(&b->x);CHKERRQ(ierr); 495 ierr = VecDestroy(&b->y);CHKERRQ(ierr); 496 ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr); 497 ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr); 498 ierr = PetscFree(A->data);CHKERRQ(ierr); 499 ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr); 500 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr); 501 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr); 502 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr); 503 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr); 504 PetscFunctionReturn(0); 505 } 506 507 #undef __FUNCT__ 508 #define __FUNCT__ "MatMult_IS" 509 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y) 510 { 511 PetscErrorCode ierr; 512 Mat_IS *is = (Mat_IS*)A->data; 513 PetscScalar zero = 0.0; 514 515 PetscFunctionBegin; 516 /* scatter the global vector x into the local work vector */ 517 ierr = 
VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 518 ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 519 520 /* multiply the local matrix */ 521 ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr); 522 523 /* scatter product back into global memory */ 524 ierr = VecSet(y,zero);CHKERRQ(ierr); 525 ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 526 ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 527 PetscFunctionReturn(0); 528 } 529 530 #undef __FUNCT__ 531 #define __FUNCT__ "MatMultAdd_IS" 532 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3) 533 { 534 Vec temp_vec; 535 PetscErrorCode ierr; 536 537 PetscFunctionBegin; /* v3 = v2 + A * v1.*/ 538 if (v3 != v2) { 539 ierr = MatMult(A,v1,v3);CHKERRQ(ierr); 540 ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr); 541 } else { 542 ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr); 543 ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr); 544 ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr); 545 ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr); 546 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr); 547 } 548 PetscFunctionReturn(0); 549 } 550 551 #undef __FUNCT__ 552 #define __FUNCT__ "MatMultTranspose_IS" 553 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x) 554 { 555 Mat_IS *is = (Mat_IS*)A->data; 556 PetscErrorCode ierr; 557 558 PetscFunctionBegin; 559 /* scatter the global vector x into the local work vector */ 560 ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 561 ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 562 563 /* multiply the local matrix */ 564 ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr); 565 566 /* scatter product back into global vector */ 567 ierr = VecSet(x,0);CHKERRQ(ierr); 568 ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 569 ierr = 
VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 570 PetscFunctionReturn(0); 571 } 572 573 #undef __FUNCT__ 574 #define __FUNCT__ "MatMultTransposeAdd_IS" 575 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3) 576 { 577 Vec temp_vec; 578 PetscErrorCode ierr; 579 580 PetscFunctionBegin; /* v3 = v2 + A' * v1.*/ 581 if (v3 != v2) { 582 ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr); 583 ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr); 584 } else { 585 ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr); 586 ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr); 587 ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr); 588 ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr); 589 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr); 590 } 591 PetscFunctionReturn(0); 592 } 593 594 #undef __FUNCT__ 595 #define __FUNCT__ "MatView_IS" 596 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer) 597 { 598 Mat_IS *a = (Mat_IS*)A->data; 599 PetscErrorCode ierr; 600 PetscViewer sviewer; 601 602 PetscFunctionBegin; 603 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 604 ierr = MatView(a->A,sviewer);CHKERRQ(ierr); 605 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 606 PetscFunctionReturn(0); 607 } 608 609 #undef __FUNCT__ 610 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS" 611 PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping) 612 { 613 PetscErrorCode ierr; 614 PetscInt nr,rbs,nc,cbs; 615 Mat_IS *is = (Mat_IS*)A->data; 616 IS from,to; 617 Vec cglobal,rglobal; 618 619 PetscFunctionBegin; 620 PetscCheckSameComm(A,1,rmapping,2); 621 PetscCheckSameComm(A,1,cmapping,3); 622 /* Destroy any previous data */ 623 ierr = VecDestroy(&is->x);CHKERRQ(ierr); 624 ierr = VecDestroy(&is->y);CHKERRQ(ierr); 625 ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr); 626 ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr); 627 ierr = MatDestroy(&is->A);CHKERRQ(ierr); 628 ierr = 
PetscSFDestroy(&is->sf);CHKERRQ(ierr); 629 ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr); 630 631 /* Setup Layout and set local to global maps */ 632 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 633 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 634 ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr); 635 ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr); 636 637 /* Create the local matrix A */ 638 ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr); 639 ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr); 640 ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr); 641 ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr); 642 ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr); 643 ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr); 644 ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr); 645 ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr); 646 ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr); 647 ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr); 648 ierr = MatSetFromOptions(is->A);CHKERRQ(ierr); 649 650 /* Create the local work vectors */ 651 ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr); 652 653 /* setup the global to local scatters */ 654 ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr); 655 ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr); 656 ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr); 657 ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr); 658 if (rmapping != cmapping) { 659 ierr = ISDestroy(&to);CHKERRQ(ierr); 660 ierr = ISDestroy(&from);CHKERRQ(ierr); 661 ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr); 662 ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr); 663 ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr); 664 } else { 665 ierr = 
PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr); 666 is->cctx = is->rctx; 667 } 668 ierr = VecDestroy(&rglobal);CHKERRQ(ierr); 669 ierr = VecDestroy(&cglobal);CHKERRQ(ierr); 670 ierr = ISDestroy(&to);CHKERRQ(ierr); 671 ierr = ISDestroy(&from);CHKERRQ(ierr); 672 PetscFunctionReturn(0); 673 } 674 675 #undef __FUNCT__ 676 #define __FUNCT__ "MatSetValues_IS" 677 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv) 678 { 679 Mat_IS *is = (Mat_IS*)mat->data; 680 PetscInt rows_l[2048],cols_l[2048]; 681 PetscErrorCode ierr; 682 683 PetscFunctionBegin; 684 #if defined(PETSC_USE_DEBUG) 685 if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n); 686 #endif 687 ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 688 ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 689 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 690 PetscFunctionReturn(0); 691 } 692 693 #undef __FUNCT__ 694 #define __FUNCT__ "MatSetValuesLocal_IS" 695 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 696 { 697 PetscErrorCode ierr; 698 Mat_IS *is = (Mat_IS*)A->data; 699 700 PetscFunctionBegin; 701 ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr); 702 PetscFunctionReturn(0); 703 } 704 705 #undef __FUNCT__ 706 #define __FUNCT__ "MatSetValuesBlockedLocal_IS" 707 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 708 { 709 PetscErrorCode ierr; 710 Mat_IS *is = (Mat_IS*)A->data; 711 712 PetscFunctionBegin; 713 ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr); 714 PetscFunctionReturn(0); 715 } 716 717 #undef __FUNCT__ 718 
#define __FUNCT__ "MatZeroRows_IS" 719 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 720 { 721 PetscInt n_l = 0, *rows_l = NULL; 722 PetscErrorCode ierr; 723 724 PetscFunctionBegin; 725 if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support"); 726 if (n) { 727 ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr); 728 ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr); 729 } 730 ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr); 731 ierr = PetscFree(rows_l);CHKERRQ(ierr); 732 PetscFunctionReturn(0); 733 } 734 735 #undef __FUNCT__ 736 #define __FUNCT__ "MatZeroRowsLocal_IS" 737 PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 738 { 739 Mat_IS *is = (Mat_IS*)A->data; 740 PetscErrorCode ierr; 741 PetscInt i; 742 PetscScalar *array; 743 744 PetscFunctionBegin; 745 if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support"); 746 { 747 /* 748 Set up is->x as a "counting vector". This is in order to MatMult_IS 749 work properly in the interface nodes. 
750 */ 751 Vec counter; 752 ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr); 753 ierr = VecSet(counter,0.);CHKERRQ(ierr); 754 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 755 ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 756 ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 757 ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 758 ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 759 ierr = VecDestroy(&counter);CHKERRQ(ierr); 760 } 761 if (!n) { 762 is->pure_neumann = PETSC_TRUE; 763 } else { 764 is->pure_neumann = PETSC_FALSE; 765 766 ierr = VecGetArray(is->y,&array);CHKERRQ(ierr); 767 ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr); 768 for (i=0; i<n; i++) { 769 ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr); 770 } 771 ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 772 ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 773 ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr); 774 } 775 PetscFunctionReturn(0); 776 } 777 778 #undef __FUNCT__ 779 #define __FUNCT__ "MatAssemblyBegin_IS" 780 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type) 781 { 782 Mat_IS *is = (Mat_IS*)A->data; 783 PetscErrorCode ierr; 784 785 PetscFunctionBegin; 786 ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr); 787 PetscFunctionReturn(0); 788 } 789 790 #undef __FUNCT__ 791 #define __FUNCT__ "MatAssemblyEnd_IS" 792 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type) 793 { 794 Mat_IS *is = (Mat_IS*)A->data; 795 PetscErrorCode ierr; 796 797 PetscFunctionBegin; 798 ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr); 799 PetscFunctionReturn(0); 800 } 801 802 #undef __FUNCT__ 803 #define __FUNCT__ "MatISGetLocalMat_IS" 804 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local) 805 { 806 Mat_IS *is = (Mat_IS*)mat->data; 807 808 
PetscFunctionBegin; 809 *local = is->A; 810 PetscFunctionReturn(0); 811 } 812 813 #undef __FUNCT__ 814 #define __FUNCT__ "MatISGetLocalMat" 815 /*@ 816 MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix. 817 818 Input Parameter: 819 . mat - the matrix 820 821 Output Parameter: 822 . local - the local matrix 823 824 Level: advanced 825 826 Notes: 827 This can be called if you have precomputed the nonzero structure of the 828 matrix and want to provide it to the inner matrix object to improve the performance 829 of the MatSetValues() operation. 830 831 This function does not increase the reference count for the local Mat. Do not destroy it and do not attempt to use 832 your reference after destroying the parent mat. 833 834 .seealso: MATIS 835 @*/ 836 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local) 837 { 838 PetscErrorCode ierr; 839 840 PetscFunctionBegin; 841 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 842 PetscValidPointer(local,2); 843 ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr); 844 PetscFunctionReturn(0); 845 } 846 847 #undef __FUNCT__ 848 #define __FUNCT__ "MatISSetLocalMat_IS" 849 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local) 850 { 851 Mat_IS *is = (Mat_IS*)mat->data; 852 PetscInt nrows,ncols,orows,ocols; 853 PetscErrorCode ierr; 854 855 PetscFunctionBegin; 856 if (is->A) { 857 ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr); 858 ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr); 859 if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols); 860 } 861 ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr); 862 ierr = MatDestroy(&is->A);CHKERRQ(ierr); 863 is->A = local; 864 PetscFunctionReturn(0); 865 } 866 867 #undef __FUNCT__ 868 #define __FUNCT__ "MatISSetLocalMat" 869 /*@ 870 MatISSetLocalMat - Replace the local matrix stored inside a MATIS object. 

   Input Parameters:
+  mat - the matrix
-  local - the local matrix

   Output Parameter:

   Level: advanced

   Notes:
   This can be called if you have precomputed the local matrix and
   want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zero all entries of the local matrix; the implicit global operator becomes zero. */
PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scale the operator by scaling the local (unassembled) matrix. */
PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Diagonal of the implicitly assembled operator: take the local diagonal and
   sum contributions from all sharing subdomains via a reverse ADD scatter. */
PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector; ADD_VALUES sums shared (interface) entries */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Forward the option to the local matrix. */
PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS *a = (Mat_IS*)A->data;
PetscErrorCode ierr; 945 946 PetscFunctionBegin; 947 ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 948 PetscFunctionReturn(0); 949 } 950 951 #undef __FUNCT__ 952 #define __FUNCT__ "MatCreateIS" 953 /*@ 954 MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each 955 process but not across processes. 956 957 Input Parameters: 958 + comm - MPI communicator that will share the matrix 959 . bs - block size of the matrix 960 . m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products 961 . rmap - local to global map for rows 962 - cmap - local to global map for cols 963 964 Output Parameter: 965 . A - the resulting matrix 966 967 Level: advanced 968 969 Notes: See MATIS for more details 970 m and n are NOT related to the size of the map, they are the size of the part of the vector owned 971 by that process. The sizes of rmap and cmap define the size of the local matrices. 972 If either rmap or cmap are NULL, than the matrix is assumed to be square 973 974 .seealso: MATIS, MatSetLocalToGlobalMapping() 975 @*/ 976 PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A) 977 { 978 PetscErrorCode ierr; 979 980 PetscFunctionBegin; 981 if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping"); 982 ierr = MatCreate(comm,A);CHKERRQ(ierr); 983 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 984 ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr); 985 ierr = MatSetType(*A,MATIS);CHKERRQ(ierr); 986 ierr = MatSetUp(*A);CHKERRQ(ierr); 987 if (rmap && cmap) { 988 ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr); 989 } else if (!rmap) { 990 ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr); 991 } else { 992 ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr); 993 } 994 PetscFunctionReturn(0); 995 } 996 997 /*MC 998 MATIS - MATIS = "is" - A matrix type to be 
 used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatZeroRowsLocal()
.  MatSetValues()
.  MatSetValuesLocal()
.  MatScale()
.  MatGetDiagonal()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.  -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options prefix for the inner matrix are given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* MATIS constructor: allocates the Mat_IS data structure, installs the
   function table and composes the MATIS-specific methods on the object. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from a zeroed table so unsupported operations stay unset */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}