1 /* 2 Creates a matrix class for using the Neumann-Neumann type preconditioners. 3 This stores the matrices in globally unassembled form. Each processor 4 assembles only its local Neumann problem and the parallel matrix vector 5 product is handled "implicitly". 6 7 Currently this allows for only one subdomain per processor. 8 */ 9 10 #include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/ 11 #include <petsc/private/sfimpl.h> 12 13 #define MATIS_MAX_ENTRIES_INSERTION 2048 14 15 #undef __FUNCT__ 16 #define __FUNCT__ "MatISSetUpSF" 17 /*@ 18 MatISSetUpSF - Setup star forest objects used by MatIS. 19 20 Collective on MPI_Comm 21 22 Input Parameters: 23 + A - the matrix 24 25 Level: advanced 26 27 Notes: This function does not need to be called by the user. 28 29 .keywords: matrix 30 31 .seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat() 32 @*/ 33 PetscErrorCode MatISSetUpSF(Mat A) 34 { 35 PetscErrorCode ierr; 36 37 PetscFunctionBegin; 38 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 39 PetscValidType(A,1); 40 ierr = PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));CHKERRQ(ierr); 41 PetscFunctionReturn(0); 42 } 43 44 #undef __FUNCT__ 45 #define __FUNCT__ "MatDiagonalSet_IS" 46 PetscErrorCode MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode) 47 { 48 Mat_IS *is = (Mat_IS*)A->data; 49 PetscErrorCode ierr; 50 51 PetscFunctionBegin; 52 if (!D) { /* this code branch is used by MatShift_IS */ 53 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 54 } else { 55 ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 56 ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 57 } 58 ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr); 59 ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr); 60 PetscFunctionReturn(0); 61 } 62 63 #undef __FUNCT__ 64 #define __FUNCT__ "MatShift_IS" 65 PetscErrorCode MatShift_IS(Mat A,PetscScalar a) 66 { 67 PetscErrorCode ierr; 68 69 PetscFunctionBegin; 70 
ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr); 71 PetscFunctionReturn(0); 72 } 73 74 #undef __FUNCT__ 75 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS" 76 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 77 { 78 PetscErrorCode ierr; 79 Mat_IS *is = (Mat_IS*)A->data; 80 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 81 82 PetscFunctionBegin; 83 #if defined(PETSC_USE_DEBUG) 84 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 85 #endif 86 ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 87 ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 88 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 89 PetscFunctionReturn(0); 90 } 91 92 #undef __FUNCT__ 93 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS" 94 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 95 { 96 PetscErrorCode ierr; 97 Mat_IS *is = (Mat_IS*)A->data; 98 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 99 100 PetscFunctionBegin; 101 #if defined(PETSC_USE_DEBUG) 102 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 103 #endif 104 ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 105 ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 106 ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 107 
PetscFunctionReturn(0); 108 } 109 110 #undef __FUNCT__ 111 #define __FUNCT__ "PetscLayoutMapLocal_Private" 112 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs) 113 { 114 PetscInt *owners = map->range; 115 PetscInt n = map->n; 116 PetscSF sf; 117 PetscInt *lidxs,*work = NULL; 118 PetscSFNode *ridxs; 119 PetscMPIInt rank; 120 PetscInt r, p = 0, len = 0; 121 PetscErrorCode ierr; 122 123 PetscFunctionBegin; 124 /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */ 125 ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr); 126 ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr); 127 for (r = 0; r < n; ++r) lidxs[r] = -1; 128 ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr); 129 for (r = 0; r < N; ++r) { 130 const PetscInt idx = idxs[r]; 131 if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N); 132 if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */ 133 ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr); 134 } 135 ridxs[r].rank = p; 136 ridxs[r].index = idxs[r] - owners[p]; 137 } 138 ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr); 139 ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr); 140 ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 141 ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 142 if (ogidxs) { /* communicate global idxs */ 143 PetscInt cum = 0,start,*work2; 144 145 ierr = PetscMalloc1(n,&work);CHKERRQ(ierr); 146 ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr); 147 for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++; 148 ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr); 149 start -= cum; 150 cum = 0; 151 for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = 
start+cum++; 152 ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 153 ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 154 ierr = PetscFree(work2);CHKERRQ(ierr); 155 } 156 ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 157 /* Compress and put in indices */ 158 for (r = 0; r < n; ++r) 159 if (lidxs[r] >= 0) { 160 if (work) work[len] = work[r]; 161 lidxs[len++] = r; 162 } 163 if (on) *on = len; 164 if (oidxs) *oidxs = lidxs; 165 if (ogidxs) *ogidxs = work; 166 PetscFunctionReturn(0); 167 } 168 169 #undef __FUNCT__ 170 #define __FUNCT__ "MatGetSubMatrix_IS" 171 static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat) 172 { 173 Mat locmat,newlocmat; 174 Mat_IS *newmatis; 175 #if defined(PETSC_USE_DEBUG) 176 Vec rtest,ltest; 177 const PetscScalar *array; 178 #endif 179 const PetscInt *idxs; 180 PetscInt i,m,n; 181 PetscErrorCode ierr; 182 183 PetscFunctionBegin; 184 if (scall == MAT_REUSE_MATRIX) { 185 PetscBool ismatis; 186 187 ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr); 188 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type"); 189 newmatis = (Mat_IS*)(*newmat)->data; 190 if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS"); 191 if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! 
Misses local col IS"); 192 } 193 /* irow and icol may not have duplicate entries */ 194 #if defined(PETSC_USE_DEBUG) 195 ierr = MatCreateVecs(mat,<est,&rtest);CHKERRQ(ierr); 196 ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr); 197 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 198 for (i=0;i<n;i++) { 199 ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 200 } 201 ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr); 202 ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr); 203 ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr); 204 ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr); 205 ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr); 206 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 207 ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr); 208 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 209 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 210 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 211 for (i=0;i<n;i++) { 212 ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 213 } 214 ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr); 215 ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr); 216 ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr); 217 ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr); 218 ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr); 219 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! 
Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 220 ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr); 221 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 222 ierr = VecDestroy(&rtest);CHKERRQ(ierr); 223 ierr = VecDestroy(<est);CHKERRQ(ierr); 224 #endif 225 if (scall == MAT_INITIAL_MATRIX) { 226 Mat_IS *matis = (Mat_IS*)mat->data; 227 ISLocalToGlobalMapping rl2g; 228 IS is; 229 PetscInt *lidxs,*lgidxs,*newgidxs; 230 PetscInt ll,newloc; 231 MPI_Comm comm; 232 233 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 234 ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr); 235 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 236 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 237 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 238 ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 239 /* communicate irow to their owners in the layout */ 240 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 241 ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 242 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 243 ierr = MatISSetUpSF(mat);CHKERRQ(ierr); 244 ierr = PetscMemzero(matis->sf_rootdata,matis->sf->nroots*sizeof(PetscInt));CHKERRQ(ierr); 245 for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1; 246 ierr = PetscFree(lidxs);CHKERRQ(ierr); 247 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 248 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 249 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 250 for (i=0,newloc=0;i<matis->sf->nleaves;i++) if (matis->sf_leafdata[i]) newloc++; 251 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 252 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 253 for (i=0,newloc=0;i<matis->sf->nleaves;i++) 254 if (matis->sf_leafdata[i]) { 255 lidxs[newloc] = i; 256 newgidxs[newloc++] = matis->sf_leafdata[i]-1; 257 } 258 ierr = 
ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 259 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 260 ierr = ISDestroy(&is);CHKERRQ(ierr); 261 /* local is to extract local submatrix */ 262 newmatis = (Mat_IS*)(*newmat)->data; 263 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr); 264 if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */ 265 PetscBool cong; 266 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr); 267 if (cong) mat->congruentlayouts = 1; 268 else mat->congruentlayouts = 0; 269 } 270 if (mat->congruentlayouts && irow == icol) { 271 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr); 272 ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr); 273 newmatis->getsub_cis = newmatis->getsub_ris; 274 } else { 275 ISLocalToGlobalMapping cl2g; 276 277 /* communicate icol to their owners in the layout */ 278 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 279 ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 280 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 281 ierr = PetscMemzero(matis->csf_rootdata,matis->csf->nroots*sizeof(PetscInt));CHKERRQ(ierr); 282 for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1; 283 ierr = PetscFree(lidxs);CHKERRQ(ierr); 284 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 285 ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 286 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 287 for (i=0,newloc=0;i<matis->csf->nleaves;i++) if (matis->csf_leafdata[i]) newloc++; 288 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 289 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 290 for (i=0,newloc=0;i<matis->csf->nleaves;i++) 291 if (matis->csf_leafdata[i]) { 292 lidxs[newloc] = i; 293 newgidxs[newloc++] = 
matis->csf_leafdata[i]-1; 294 } 295 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 296 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 297 ierr = ISDestroy(&is);CHKERRQ(ierr); 298 /* local is to extract local submatrix */ 299 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr); 300 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 301 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 302 } 303 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 304 } else { 305 ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr); 306 } 307 ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr); 308 newmatis = (Mat_IS*)(*newmat)->data; 309 ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr); 310 if (scall == MAT_INITIAL_MATRIX) { 311 ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr); 312 ierr = MatDestroy(&newlocmat);CHKERRQ(ierr); 313 } 314 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 315 ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 316 PetscFunctionReturn(0); 317 } 318 319 #undef __FUNCT__ 320 #define __FUNCT__ "MatCopy_IS" 321 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str) 322 { 323 Mat_IS *a = (Mat_IS*)A->data,*b; 324 PetscBool ismatis; 325 PetscErrorCode ierr; 326 327 PetscFunctionBegin; 328 ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr); 329 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented"); 330 b = (Mat_IS*)B->data; 331 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 332 PetscFunctionReturn(0); 333 } 334 335 #undef __FUNCT__ 336 #define __FUNCT__ "MatMissingDiagonal_IS" 337 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d) 338 { 339 Vec v; 340 const PetscScalar *array; 341 PetscInt i,n; 342 PetscErrorCode ierr; 343 344 
PetscFunctionBegin; 345 *missing = PETSC_FALSE; 346 ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr); 347 ierr = MatGetDiagonal(A,v);CHKERRQ(ierr); 348 ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr); 349 ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr); 350 for (i=0;i<n;i++) if (array[i] == 0.) break; 351 ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr); 352 ierr = VecDestroy(&v);CHKERRQ(ierr); 353 if (i != n) *missing = PETSC_TRUE; 354 if (d) { 355 *d = -1; 356 if (*missing) { 357 PetscInt rstart; 358 ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr); 359 *d = i+rstart; 360 } 361 } 362 PetscFunctionReturn(0); 363 } 364 365 #undef __FUNCT__ 366 #define __FUNCT__ "MatISSetUpSF_IS" 367 static PetscErrorCode MatISSetUpSF_IS(Mat B) 368 { 369 Mat_IS *matis = (Mat_IS*)(B->data); 370 const PetscInt *gidxs; 371 PetscInt nleaves; 372 PetscErrorCode ierr; 373 374 PetscFunctionBegin; 375 if (matis->sf) PetscFunctionReturn(0); 376 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr); 377 ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 378 ierr = ISLocalToGlobalMappingGetSize(B->rmap->mapping,&nleaves);CHKERRQ(ierr); 379 ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 380 ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 381 ierr = PetscMalloc2(matis->sf->nroots,&matis->sf_rootdata,matis->sf->nleaves,&matis->sf_leafdata);CHKERRQ(ierr); 382 if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */ 383 ierr = ISLocalToGlobalMappingGetSize(B->cmap->mapping,&nleaves);CHKERRQ(ierr); 384 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr); 385 ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 386 ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 387 ierr = 
ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 388 ierr = PetscMalloc2(matis->csf->nroots,&matis->csf_rootdata,matis->csf->nleaves,&matis->csf_leafdata);CHKERRQ(ierr); 389 } else { 390 matis->csf = matis->sf; 391 matis->csf_leafdata = matis->sf_leafdata; 392 matis->csf_rootdata = matis->sf_rootdata; 393 } 394 PetscFunctionReturn(0); 395 } 396 397 #undef __FUNCT__ 398 #define __FUNCT__ "MatISSetPreallocation" 399 /*@ 400 MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix. 401 402 Collective on MPI_Comm 403 404 Input Parameters: 405 + B - the matrix 406 . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 407 (same value is used for all local rows) 408 . d_nnz - array containing the number of nonzeros in the various rows of the 409 DIAGONAL portion of the local submatrix (possibly different for each row) 410 or NULL, if d_nz is used to specify the nonzero structure. 411 The size of this array is equal to the number of local rows, i.e 'm'. 412 For matrices that will be factored, you must leave room for (and set) 413 the diagonal entry even if it is zero. 414 . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 415 submatrix (same value is used for all local rows). 416 - o_nnz - array containing the number of nonzeros in the various rows of the 417 OFF-DIAGONAL portion of the local submatrix (possibly different for 418 each row) or NULL, if o_nz is used to specify the nonzero 419 structure. The size of this array is equal to the number 420 of local rows, i.e 'm'. 421 422 If the *_nnz parameter is given then the *_nz parameter is ignored 423 424 Level: intermediate 425 426 Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition 427 from the asssembled format to the unassembled one. 
It overestimates the preallocation of MATIS local 428 matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects. 429 430 .keywords: matrix 431 432 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS 433 @*/ 434 PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 435 { 436 PetscErrorCode ierr; 437 438 PetscFunctionBegin; 439 PetscValidHeaderSpecific(B,MAT_CLASSID,1); 440 PetscValidType(B,1); 441 ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 442 PetscFunctionReturn(0); 443 } 444 445 #undef __FUNCT__ 446 #define __FUNCT__ "MatISSetPreallocation_IS" 447 static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 448 { 449 Mat_IS *matis = (Mat_IS*)(B->data); 450 PetscInt bs,i,nlocalcols; 451 PetscErrorCode ierr; 452 453 PetscFunctionBegin; 454 if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping"); 455 ierr = MatISSetUpSF(B);CHKERRQ(ierr); 456 457 if (!d_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nz; 458 else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nnz[i]; 459 460 if (!o_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nz; 461 else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nnz[i]; 462 463 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 464 ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr); 465 ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr); 466 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 467 468 for (i=0;i<matis->sf->nleaves;i++) matis->sf_leafdata[i] = 
PetscMin(matis->sf_leafdata[i],nlocalcols); 469 ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr); 470 471 for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs; 472 ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 473 474 for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i]-i; 475 ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 476 PetscFunctionReturn(0); 477 } 478 479 #undef __FUNCT__ 480 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private" 481 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce) 482 { 483 Mat_IS *matis = (Mat_IS*)(A->data); 484 PetscInt *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership; 485 const PetscInt *global_indices_r,*global_indices_c; 486 PetscInt i,j,bs,rows,cols; 487 PetscInt lrows,lcols; 488 PetscInt local_rows,local_cols; 489 PetscMPIInt nsubdomains; 490 PetscBool isdense,issbaij; 491 PetscErrorCode ierr; 492 493 PetscFunctionBegin; 494 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr); 495 ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr); 496 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 497 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 498 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 499 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 500 ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 501 if (A->rmap->mapping != A->cmap->mapping) { 502 ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 503 } else { 504 global_indices_c = global_indices_r; 505 } 506 507 if (issbaij) { 508 ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr); 509 } 510 /* 511 An SF reduce is needed to sum up properly on shared rows. 
512 Note that generally preallocation is not exact, since it overestimates nonzeros 513 */ 514 ierr = MatISSetUpSF(A);CHKERRQ(ierr); 515 ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr); 516 ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr); 517 /* All processes need to compute entire row ownership */ 518 ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr); 519 ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 520 for (i=0;i<nsubdomains;i++) { 521 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 522 row_ownership[j] = i; 523 } 524 } 525 ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 526 527 /* 528 my_dnz and my_onz contains exact contribution to preallocation from each local mat 529 then, they will be summed up properly. This way, preallocation is always sufficient 530 */ 531 ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr); 532 /* preallocation as a MATAIJ */ 533 if (isdense) { /* special case for dense local matrices */ 534 for (i=0;i<local_rows;i++) { 535 PetscInt index_row = global_indices_r[i]; 536 for (j=i;j<local_rows;j++) { 537 PetscInt owner = row_ownership[index_row]; 538 PetscInt index_col = global_indices_c[j]; 539 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 540 my_dnz[i] += 1; 541 } else { /* offdiag block */ 542 my_onz[i] += 1; 543 } 544 /* same as before, interchanging rows and cols */ 545 if (i != j) { 546 owner = row_ownership[index_col]; 547 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 548 my_dnz[j] += 1; 549 } else { 550 my_onz[j] += 1; 551 } 552 } 553 } 554 } 555 } else if (matis->A->ops->getrowij) { 556 const PetscInt *ii,*jj,*jptr; 557 PetscBool done; 558 ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr); 559 if (!done) 
SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__); 560 jptr = jj; 561 for (i=0;i<local_rows;i++) { 562 PetscInt index_row = global_indices_r[i]; 563 for (j=0;j<ii[i+1]-ii[i];j++,jptr++) { 564 PetscInt owner = row_ownership[index_row]; 565 PetscInt index_col = global_indices_c[*jptr]; 566 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 567 my_dnz[i] += 1; 568 } else { /* offdiag block */ 569 my_onz[i] += 1; 570 } 571 /* same as before, interchanging rows and cols */ 572 if (issbaij && index_col != index_row) { 573 owner = row_ownership[index_col]; 574 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 575 my_dnz[*jptr] += 1; 576 } else { 577 my_onz[*jptr] += 1; 578 } 579 } 580 } 581 } 582 ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr); 583 if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__); 584 } else { /* loop over rows and use MatGetRow */ 585 for (i=0;i<local_rows;i++) { 586 const PetscInt *cols; 587 PetscInt ncols,index_row = global_indices_r[i]; 588 ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 589 for (j=0;j<ncols;j++) { 590 PetscInt owner = row_ownership[index_row]; 591 PetscInt index_col = global_indices_c[cols[j]]; 592 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 593 my_dnz[i] += 1; 594 } else { /* offdiag block */ 595 my_onz[i] += 1; 596 } 597 /* same as before, interchanging rows and cols */ 598 if (issbaij && index_col != index_row) { 599 owner = row_ownership[index_col]; 600 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 601 my_dnz[cols[j]] += 1; 602 } else { 603 my_onz[cols[j]] += 1; 604 } 605 } 606 } 607 ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 608 } 609 } 610 ierr = 
ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 611 if (global_indices_c != global_indices_r) { 612 ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 613 } 614 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 615 616 /* Reduce my_dnz and my_onz */ 617 if (maxreduce) { 618 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 619 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 620 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 621 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 622 } else { 623 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 624 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 625 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 626 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 627 } 628 ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr); 629 630 /* Resize preallocation if overestimated */ 631 for (i=0;i<lrows;i++) { 632 dnz[i] = PetscMin(dnz[i],lcols); 633 onz[i] = PetscMin(onz[i],cols-lcols); 634 } 635 /* set preallocation */ 636 ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr); 637 for (i=0;i<lrows/bs;i++) { 638 dnz[i] = dnz[i*bs]/bs; 639 onz[i] = onz[i*bs]/bs; 640 } 641 ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 642 ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 643 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 644 if (issbaij) { 645 ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr); 646 } 647 PetscFunctionReturn(0); 648 } 649 650 #undef __FUNCT__ 651 #define __FUNCT__ "MatISGetMPIXAIJ_IS" 652 static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M) 653 { 654 Mat_IS *matis = (Mat_IS*)(mat->data); 655 Mat local_mat; 656 /* info on mat */ 
657 PetscInt bs,rows,cols,lrows,lcols; 658 PetscInt local_rows,local_cols; 659 PetscBool isdense,issbaij,isseqaij; 660 PetscMPIInt nsubdomains; 661 /* values insertion */ 662 PetscScalar *array; 663 /* work */ 664 PetscErrorCode ierr; 665 666 PetscFunctionBegin; 667 /* get info from mat */ 668 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr); 669 if (nsubdomains == 1) { 670 if (reuse == MAT_INITIAL_MATRIX) { 671 ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr); 672 } else { 673 ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 674 } 675 PetscFunctionReturn(0); 676 } 677 ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr); 678 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 679 ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr); 680 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 681 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 682 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 683 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 684 685 if (reuse == MAT_INITIAL_MATRIX) { 686 MatType new_mat_type; 687 PetscBool issbaij_red; 688 689 /* determining new matrix type */ 690 ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 691 if (issbaij_red) { 692 new_mat_type = MATSBAIJ; 693 } else { 694 if (bs>1) { 695 new_mat_type = MATBAIJ; 696 } else { 697 new_mat_type = MATAIJ; 698 } 699 } 700 701 ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr); 702 ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr); 703 ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr); 704 ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr); 705 ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr); 706 } else { 707 PetscInt mbs,mrows,mcols,mlrows,mlcols; 708 /* some checks */ 709 ierr = 
MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 710 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 711 ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr); 712 if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows); 713 if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 714 if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows); 715 if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols); 716 if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs); 717 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 718 } 719 720 if (issbaij) { 721 ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr); 722 } else { 723 ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr); 724 local_mat = matis->A; 725 } 726 727 /* Set values */ 728 ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr); 729 if (isdense) { /* special case for dense local matrices */ 730 PetscInt i,*dummy_rows,*dummy_cols; 731 732 if (local_rows != local_cols) { 733 ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr); 734 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 735 for (i=0;i<local_cols;i++) dummy_cols[i] = i; 736 } else { 737 ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr); 738 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 739 dummy_cols = dummy_rows; 740 } 741 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 742 ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr); 743 ierr = 
MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr); 744 ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr); 745 if (dummy_rows != dummy_cols) { 746 ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr); 747 } else { 748 ierr = PetscFree(dummy_rows);CHKERRQ(ierr); 749 } 750 } else if (isseqaij) { 751 PetscInt i,nvtxs,*xadj,*adjncy; 752 PetscBool done; 753 754 ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 755 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__); 756 ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr); 757 for (i=0;i<nvtxs;i++) { 758 ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr); 759 } 760 ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 761 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__); 762 ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr); 763 } else { /* very basic values insertion for all other matrix types */ 764 PetscInt i; 765 766 for (i=0;i<local_rows;i++) { 767 PetscInt j; 768 const PetscInt *local_indices_cols; 769 770 ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 771 ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr); 772 ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 773 } 774 } 775 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 776 ierr = MatDestroy(&local_mat);CHKERRQ(ierr); 777 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 778 if (isdense) { 779 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr); 780 
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, validate the output matrix before dispatching to the implementation */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/* Duplicate a MATIS: create a new MATIS with the same layouts and l2g maps,
   then duplicate the local matrix (values copied or not according to op). */
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr); /* MatISSetLocalMat took a reference */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* Hermitian test: check the local matrix, then logical-AND the result across all processes. */
static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* Symmetry test: check the local matrix, then logical-AND the result across all processes. */
static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Release all MATIS-owned objects (local matrix, scatters, work vectors, SFs)
   and remove the composed methods installed at creation. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  if (b->sf != b->csf) { /* a distinct column SF owns its own work arrays */
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x, "implicit" parallel product: gather x locally, apply the local
   Neumann matrix, then sum the local contributions back into y. */
static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory; ADD_VALUES accumulates interface contributions */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* aliased output: compute into a temporary so v2 is not clobbered mid-product */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A'*y: mirror of MatMult_IS using the row scatter forward and the column scatter in reverse. */
static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector y into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* aliased output: compute into a temporary so v2 is not clobbered mid-product */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* View a MATIS by viewing each local matrix through a per-process subviewer. */
static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;
  PetscBool      isascii,view = PETSC_TRUE;

  PetscFunctionBegin;
  ierr =
PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    /* INFO format is handled by the generic MatView; nothing to print here */
    if (format == PETSC_VIEWER_ASCII_INFO) view = PETSC_FALSE;
  }
  if (!view) PetscFunctionReturn(0);
  ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Install the row/column local-to-global maps: (re)creates the sequential local
   matrix sized by the maps and, unless this is a local reference matrix, the
   global-to-local scatters and the interface multiplicity counter vector. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A; its size equals the size of the l2g maps */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* inner matrix options live under the "is_" sub-prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      /* rectangular (or differently mapped) case: build a separate column scatter */
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* same map: share the row scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): counts how many subdomains share each local dof */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* Global-index insertion: map global row/column indices to local ones (unmappable
   indices become -1 and are ignored by MatSetValues) and insert into the local matrix. */
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/* Blocked variant of MatSetValues_IS: same mapping logic, using block indices. */
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Local-index insertion goes straight into the local matrix, no mapping needed. */
static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked local-index insertion: forwarded directly to the local matrix. */
static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/* Zero the given LOCAL rows (and optionally columns) of the local matrix.
   n == 0 flags a pure Neumann subdomain. */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.)
{
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      /* divide by the interface multiplicity so the globally assembled diagonal sums to diag */
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/* Shared implementation for MatZeroRows/MatZeroRowsColumns: maps the global row
   list to owned rows, optionally fixes the rhs, broadcasts the zeroed-row flags
   to the local (overlapping) numbering via the star forest, and zeroes locally. */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed: b_i = diag * x_i on zeroed rows */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  ierr = MatISSetUpSF(A);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the flagged leaves into a list of local row indices */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_IS"
static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly is purely local: forward to the local matrix. */
static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Hand out the local matrix; no new reference is taken. */
static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
/* Replace the local matrix with a same-sized one; takes a reference on the new matrix. */
static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    /* the replacement must match the current local sizes */
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
  }
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
static PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Global diagonal: local diagonals summed at interface dofs via the reverse scatter. */
static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAXPY_IS"
/* Y += a*X, computed on the local matrices; both operands must be MATIS
   (assumed to share compatible local-to-global maps). */
static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_IS         *y = (Mat_IS*)Y->data;
  Mat_IS         *x;
#if defined(PETSC_USE_DEBUG)
  PetscBool      ismatis;
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
  if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
#endif
  x = (Mat_IS*)X->data;
  ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Create a MATIS "local reference" submatrix: shares the local matrix of A and
   installs l2g maps that send [0,nrl) / [0,ncl) to the requested row/col ISs. */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the unused tail: out-of-range sentinel in debug builds, -1 (ignored) otherwise */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* same row/column selection and maps: share the row map */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  matis->islocalref = PETSC_TRUE; /* skip scatter/vector setup in MatSetLocalToGlobalMapping_IS */
  ierr =
MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr); /* share A's local matrix */
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops: only insertion/assembly/destroy are allowed on the local reference */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    /* only the column map was given: assume square and reuse it for rows */
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    /* only the row map was given: assume square and reuse it for columns */
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatSetValues()
.  MatSetValuesBlocked()
.  MatSetValuesLocal()
.  MatSetValuesBlockedLocal()
.  MatScale()
.  MatGetDiagonal()
.  MatMissingDiagonal()
.  MatDuplicate()
.  MatCopy()
.  MatAXPY()
.  MatGetSubMatrix()
.  MatGetLocalSubMatrix()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.
-mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options prefix for the inner matrix are given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor invoked by MatSetType(A,MATIS): allocates the private Mat_IS
   context and installs the MATIS function table and composed methods. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  /* zeroed private context, registered with A for logging */
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: clear the whole table first so any op not explicitly
     set below is NULL (i.e. reported as unsupported by MATIS) */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;

  /* special MATIS functions: reached by the user through the corresponding
     MatISXxx() query interfaces (e.g. MatISGetLocalMat()) */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);CHKERRQ(ierr);
  /* finally stamp the type name so MatGetType() reports MATIS */
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}