1 /* 2 Creates a matrix class for using the Neumann-Neumann type preconditioners. 3 This stores the matrices in globally unassembled form. Each processor 4 assembles only its local Neumann problem and the parallel matrix vector 5 product is handled "implicitly". 6 7 Currently this allows for only one subdomain per processor. 8 */ 9 10 #include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/ 11 12 #define MATIS_MAX_ENTRIES_INSERTION 2048 13 14 static PetscErrorCode MatISComputeSF_Private(Mat); 15 16 #undef __FUNCT__ 17 #define __FUNCT__ "MatTranspose_IS" 18 PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B) 19 { 20 Mat C,lC,lA; 21 ISLocalToGlobalMapping rl2g,cl2g; 22 PetscBool notset = PETSC_FALSE,cong = PETSC_TRUE; 23 PetscErrorCode ierr; 24 25 PetscFunctionBegin; 26 if (reuse == MAT_REUSE_MATRIX) { 27 PetscBool rcong,ccong; 28 29 ierr = PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);CHKERRQ(ierr); 30 ierr = PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);CHKERRQ(ierr); 31 cong = (PetscBool)(rcong || ccong); 32 } 33 if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) { 34 ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr); 35 } else { 36 ierr = PetscObjectTypeCompare((PetscObject)*B,MATIS,¬set);CHKERRQ(ierr); 37 C = *B; 38 } 39 40 if (!notset) { 41 ierr = MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);CHKERRQ(ierr); 42 ierr = MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));CHKERRQ(ierr); 43 ierr = MatSetType(C,MATIS);CHKERRQ(ierr); 44 ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr); 45 ierr = MatSetLocalToGlobalMapping(C,cl2g,rl2g);CHKERRQ(ierr); 46 } 47 48 /* perform local transposition */ 49 ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr); 50 ierr = MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);CHKERRQ(ierr); 51 ierr = MatISSetLocalMat(C,lC);CHKERRQ(ierr); 52 ierr = MatDestroy(&lC);CHKERRQ(ierr); 53 54 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 55 ierr = 
MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 56 57 if (reuse == MAT_INITIAL_MATRIX || *B != A) { 58 if (!cong) { 59 ierr = MatDestroy(B);CHKERRQ(ierr); 60 } 61 *B = C; 62 } else { 63 ierr = MatHeaderMerge(A,&C);CHKERRQ(ierr); 64 } 65 PetscFunctionReturn(0); 66 } 67 68 #undef __FUNCT__ 69 #define __FUNCT__ "MatDiagonalSet_IS" 70 PetscErrorCode MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode) 71 { 72 Mat_IS *is = (Mat_IS*)A->data; 73 PetscErrorCode ierr; 74 75 PetscFunctionBegin; 76 if (!D) { /* this code branch is used by MatShift_IS */ 77 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 78 } else { 79 ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 80 ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 81 } 82 ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr); 83 ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr); 84 PetscFunctionReturn(0); 85 } 86 87 #undef __FUNCT__ 88 #define __FUNCT__ "MatShift_IS" 89 PetscErrorCode MatShift_IS(Mat A,PetscScalar a) 90 { 91 PetscErrorCode ierr; 92 93 PetscFunctionBegin; 94 ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr); 95 PetscFunctionReturn(0); 96 } 97 98 #undef __FUNCT__ 99 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS" 100 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 101 { 102 PetscErrorCode ierr; 103 Mat_IS *is = (Mat_IS*)A->data; 104 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 105 106 PetscFunctionBegin; 107 #if defined(PETSC_USE_DEBUG) 108 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 109 #endif 110 ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 111 ierr = 
ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 112 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 113 PetscFunctionReturn(0); 114 } 115 116 #undef __FUNCT__ 117 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS" 118 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 119 { 120 PetscErrorCode ierr; 121 Mat_IS *is = (Mat_IS*)A->data; 122 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 123 124 PetscFunctionBegin; 125 #if defined(PETSC_USE_DEBUG) 126 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 127 #endif 128 ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 129 ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 130 ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 131 PetscFunctionReturn(0); 132 } 133 134 #undef __FUNCT__ 135 #define __FUNCT__ "PetscLayoutMapLocal_Private" 136 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs) 137 { 138 PetscInt *owners = map->range; 139 PetscInt n = map->n; 140 PetscSF sf; 141 PetscInt *lidxs,*work = NULL; 142 PetscSFNode *ridxs; 143 PetscMPIInt rank; 144 PetscInt r, p = 0, len = 0; 145 PetscErrorCode ierr; 146 147 PetscFunctionBegin; 148 /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */ 149 ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr); 150 ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr); 151 for (r = 0; r < n; ++r) lidxs[r] = -1; 152 ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr); 153 for (r = 0; r < 
N; ++r) { 154 const PetscInt idx = idxs[r]; 155 if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N); 156 if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */ 157 ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr); 158 } 159 ridxs[r].rank = p; 160 ridxs[r].index = idxs[r] - owners[p]; 161 } 162 ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr); 163 ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr); 164 ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 165 ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 166 if (ogidxs) { /* communicate global idxs */ 167 PetscInt cum = 0,start,*work2; 168 169 ierr = PetscMalloc1(n,&work);CHKERRQ(ierr); 170 ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr); 171 for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++; 172 ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr); 173 start -= cum; 174 cum = 0; 175 for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++; 176 ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 177 ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 178 ierr = PetscFree(work2);CHKERRQ(ierr); 179 } 180 ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 181 /* Compress and put in indices */ 182 for (r = 0; r < n; ++r) 183 if (lidxs[r] >= 0) { 184 if (work) work[len] = work[r]; 185 lidxs[len++] = r; 186 } 187 if (on) *on = len; 188 if (oidxs) *oidxs = lidxs; 189 if (ogidxs) *ogidxs = work; 190 PetscFunctionReturn(0); 191 } 192 193 #undef __FUNCT__ 194 #define __FUNCT__ "MatGetSubMatrix_IS" 195 static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat) 196 { 197 Mat locmat,newlocmat; 198 Mat_IS *newmatis; 199 #if defined(PETSC_USE_DEBUG) 200 Vec rtest,ltest; 201 const PetscScalar *array; 202 
#endif 203 const PetscInt *idxs; 204 PetscInt i,m,n; 205 PetscErrorCode ierr; 206 207 PetscFunctionBegin; 208 if (scall == MAT_REUSE_MATRIX) { 209 PetscBool ismatis; 210 211 ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr); 212 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type"); 213 newmatis = (Mat_IS*)(*newmat)->data; 214 if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS"); 215 if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS"); 216 } 217 /* irow and icol may not have duplicate entries */ 218 #if defined(PETSC_USE_DEBUG) 219 ierr = MatCreateVecs(mat,<est,&rtest);CHKERRQ(ierr); 220 ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr); 221 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 222 for (i=0;i<n;i++) { 223 ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 224 } 225 ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr); 226 ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr); 227 ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr); 228 ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr); 229 ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr); 230 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! 
Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 231 ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr); 232 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 233 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 234 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 235 for (i=0;i<n;i++) { 236 ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 237 } 238 ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr); 239 ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr); 240 ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr); 241 ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr); 242 ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr); 243 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 244 ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr); 245 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 246 ierr = VecDestroy(&rtest);CHKERRQ(ierr); 247 ierr = VecDestroy(<est);CHKERRQ(ierr); 248 #endif 249 if (scall == MAT_INITIAL_MATRIX) { 250 Mat_IS *matis = (Mat_IS*)mat->data; 251 ISLocalToGlobalMapping rl2g; 252 IS is; 253 PetscInt *lidxs,*lgidxs,*newgidxs; 254 PetscInt ll,newloc; 255 MPI_Comm comm; 256 257 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 258 ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr); 259 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 260 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 261 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 262 ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 263 /* communicate irow to their owners in the layout */ 264 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 265 ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 266 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 267 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 268 ierr = 
MatISComputeSF_Private(mat);CHKERRQ(ierr); 269 } 270 ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 271 for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1; 272 ierr = PetscFree(lidxs);CHKERRQ(ierr); 273 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 274 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 275 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 276 for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++; 277 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 278 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 279 for (i=0,newloc=0;i<matis->sf_nleaves;i++) 280 if (matis->sf_leafdata[i]) { 281 lidxs[newloc] = i; 282 newgidxs[newloc++] = matis->sf_leafdata[i]-1; 283 } 284 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 285 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 286 ierr = ISDestroy(&is);CHKERRQ(ierr); 287 /* local is to extract local submatrix */ 288 newmatis = (Mat_IS*)(*newmat)->data; 289 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr); 290 if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */ 291 PetscBool cong; 292 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr); 293 if (cong) mat->congruentlayouts = 1; 294 else mat->congruentlayouts = 0; 295 } 296 if (mat->congruentlayouts && irow == icol) { 297 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr); 298 ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr); 299 newmatis->getsub_cis = newmatis->getsub_ris; 300 } else { 301 ISLocalToGlobalMapping cl2g; 302 303 /* communicate icol to their owners in the layout */ 304 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 305 ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 
306 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 307 ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 308 for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1; 309 ierr = PetscFree(lidxs);CHKERRQ(ierr); 310 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 311 ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 312 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 313 for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++; 314 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 315 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 316 for (i=0,newloc=0;i<matis->csf_nleaves;i++) 317 if (matis->csf_leafdata[i]) { 318 lidxs[newloc] = i; 319 newgidxs[newloc++] = matis->csf_leafdata[i]-1; 320 } 321 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 322 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 323 ierr = ISDestroy(&is);CHKERRQ(ierr); 324 /* local is to extract local submatrix */ 325 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr); 326 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 327 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 328 } 329 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 330 } else { 331 ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr); 332 } 333 ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr); 334 newmatis = (Mat_IS*)(*newmat)->data; 335 ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr); 336 if (scall == MAT_INITIAL_MATRIX) { 337 ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr); 338 ierr = MatDestroy(&newlocmat);CHKERRQ(ierr); 339 } 340 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 341 ierr = 
MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 342 PetscFunctionReturn(0); 343 } 344 345 #undef __FUNCT__ 346 #define __FUNCT__ "MatCopy_IS" 347 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str) 348 { 349 Mat_IS *a = (Mat_IS*)A->data,*b; 350 PetscBool ismatis; 351 PetscErrorCode ierr; 352 353 PetscFunctionBegin; 354 ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr); 355 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented"); 356 b = (Mat_IS*)B->data; 357 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 358 PetscFunctionReturn(0); 359 } 360 361 #undef __FUNCT__ 362 #define __FUNCT__ "MatMissingDiagonal_IS" 363 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d) 364 { 365 Vec v; 366 const PetscScalar *array; 367 PetscInt i,n; 368 PetscErrorCode ierr; 369 370 PetscFunctionBegin; 371 *missing = PETSC_FALSE; 372 ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr); 373 ierr = MatGetDiagonal(A,v);CHKERRQ(ierr); 374 ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr); 375 ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr); 376 for (i=0;i<n;i++) if (array[i] == 0.) 
break; 377 ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr); 378 ierr = VecDestroy(&v);CHKERRQ(ierr); 379 if (i != n) *missing = PETSC_TRUE; 380 if (d) { 381 *d = -1; 382 if (*missing) { 383 PetscInt rstart; 384 ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr); 385 *d = i+rstart; 386 } 387 } 388 PetscFunctionReturn(0); 389 } 390 391 #undef __FUNCT__ 392 #define __FUNCT__ "MatISComputeSF_Private" 393 static PetscErrorCode MatISComputeSF_Private(Mat B) 394 { 395 Mat_IS *matis = (Mat_IS*)(B->data); 396 const PetscInt *gidxs; 397 PetscErrorCode ierr; 398 399 PetscFunctionBegin; 400 ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr); 401 ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr); 402 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr); 403 ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 404 ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 405 ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 406 ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr); 407 if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */ 408 ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr); 409 ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr); 410 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr); 411 ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 412 ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 413 ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 414 ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr); 415 } else { 416 matis->csf = matis->sf; 417 matis->csf_nleaves = 
matis->sf_nleaves; 418 matis->csf_nroots = matis->sf_nroots; 419 matis->csf_leafdata = matis->sf_leafdata; 420 matis->csf_rootdata = matis->sf_rootdata; 421 } 422 PetscFunctionReturn(0); 423 } 424 425 #undef __FUNCT__ 426 #define __FUNCT__ "MatISSetPreallocation" 427 /*@ 428 MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix. 429 430 Collective on MPI_Comm 431 432 Input Parameters: 433 + B - the matrix 434 . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 435 (same value is used for all local rows) 436 . d_nnz - array containing the number of nonzeros in the various rows of the 437 DIAGONAL portion of the local submatrix (possibly different for each row) 438 or NULL, if d_nz is used to specify the nonzero structure. 439 The size of this array is equal to the number of local rows, i.e 'm'. 440 For matrices that will be factored, you must leave room for (and set) 441 the diagonal entry even if it is zero. 442 . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 443 submatrix (same value is used for all local rows). 444 - o_nnz - array containing the number of nonzeros in the various rows of the 445 OFF-DIAGONAL portion of the local submatrix (possibly different for 446 each row) or NULL, if o_nz is used to specify the nonzero 447 structure. The size of this array is equal to the number 448 of local rows, i.e 'm'. 449 450 If the *_nnz parameter is given then the *_nz parameter is ignored 451 452 Level: intermediate 453 454 Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition 455 from the asssembled format to the unassembled one. It overestimates the preallocation of MATIS local 456 matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects. 
457 458 .keywords: matrix 459 460 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS 461 @*/ 462 PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 463 { 464 PetscErrorCode ierr; 465 466 PetscFunctionBegin; 467 PetscValidHeaderSpecific(B,MAT_CLASSID,1); 468 PetscValidType(B,1); 469 ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 470 PetscFunctionReturn(0); 471 } 472 473 #undef __FUNCT__ 474 #define __FUNCT__ "MatISSetPreallocation_IS" 475 static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 476 { 477 Mat_IS *matis = (Mat_IS*)(B->data); 478 PetscInt bs,i,nlocalcols; 479 PetscErrorCode ierr; 480 481 PetscFunctionBegin; 482 if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping"); 483 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 484 ierr = MatISComputeSF_Private(B);CHKERRQ(ierr); 485 } 486 if (!d_nnz) { 487 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz; 488 } else { 489 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i]; 490 } 491 if (!o_nnz) { 492 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz; 493 } else { 494 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i]; 495 } 496 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 497 ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr); 498 ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr); 499 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 500 for (i=0;i<matis->sf_nleaves;i++) { 501 matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols); 502 } 503 ierr = 
MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr); 504 for (i=0;i<matis->sf_nleaves/bs;i++) { 505 matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs; 506 } 507 ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 508 for (i=0;i<matis->sf_nleaves/bs;i++) { 509 matis->sf_leafdata[i] = matis->sf_leafdata[i]-i; 510 } 511 ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 512 PetscFunctionReturn(0); 513 } 514 515 #undef __FUNCT__ 516 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private" 517 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce) 518 { 519 Mat_IS *matis = (Mat_IS*)(A->data); 520 PetscInt *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership; 521 const PetscInt *global_indices_r,*global_indices_c; 522 PetscInt i,j,bs,rows,cols; 523 PetscInt lrows,lcols; 524 PetscInt local_rows,local_cols; 525 PetscMPIInt nsubdomains; 526 PetscBool isdense,issbaij; 527 PetscErrorCode ierr; 528 529 PetscFunctionBegin; 530 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr); 531 ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr); 532 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 533 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 534 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 535 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 536 ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 537 if (A->rmap->mapping != A->cmap->mapping) { 538 ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 539 } else { 540 global_indices_c = global_indices_r; 541 } 542 543 if (issbaij) { 544 ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr); 545 } 546 /* 547 An SF reduce is needed to sum up properly on shared rows. 
548 Note that generally preallocation is not exact, since it overestimates nonzeros 549 */ 550 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 551 ierr = MatISComputeSF_Private(A);CHKERRQ(ierr); 552 } 553 ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr); 554 ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr); 555 /* All processes need to compute entire row ownership */ 556 ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr); 557 ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 558 for (i=0;i<nsubdomains;i++) { 559 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 560 row_ownership[j] = i; 561 } 562 } 563 ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 564 565 /* 566 my_dnz and my_onz contains exact contribution to preallocation from each local mat 567 then, they will be summed up properly. This way, preallocation is always sufficient 568 */ 569 ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr); 570 /* preallocation as a MATAIJ */ 571 if (isdense) { /* special case for dense local matrices */ 572 for (i=0;i<local_rows;i++) { 573 PetscInt index_row = global_indices_r[i]; 574 for (j=i;j<local_rows;j++) { 575 PetscInt owner = row_ownership[index_row]; 576 PetscInt index_col = global_indices_c[j]; 577 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 578 my_dnz[i] += 1; 579 } else { /* offdiag block */ 580 my_onz[i] += 1; 581 } 582 /* same as before, interchanging rows and cols */ 583 if (i != j) { 584 owner = row_ownership[index_col]; 585 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 586 my_dnz[j] += 1; 587 } else { 588 my_onz[j] += 1; 589 } 590 } 591 } 592 } 593 } else if (matis->A->ops->getrowij) { 594 const PetscInt *ii,*jj,*jptr; 595 PetscBool done; 596 ierr = 
MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr); 597 if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__); 598 jptr = jj; 599 for (i=0;i<local_rows;i++) { 600 PetscInt index_row = global_indices_r[i]; 601 for (j=0;j<ii[i+1]-ii[i];j++,jptr++) { 602 PetscInt owner = row_ownership[index_row]; 603 PetscInt index_col = global_indices_c[*jptr]; 604 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 605 my_dnz[i] += 1; 606 } else { /* offdiag block */ 607 my_onz[i] += 1; 608 } 609 /* same as before, interchanging rows and cols */ 610 if (issbaij && index_col != index_row) { 611 owner = row_ownership[index_col]; 612 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 613 my_dnz[*jptr] += 1; 614 } else { 615 my_onz[*jptr] += 1; 616 } 617 } 618 } 619 } 620 ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr); 621 if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__); 622 } else { /* loop over rows and use MatGetRow */ 623 for (i=0;i<local_rows;i++) { 624 const PetscInt *cols; 625 PetscInt ncols,index_row = global_indices_r[i]; 626 ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 627 for (j=0;j<ncols;j++) { 628 PetscInt owner = row_ownership[index_row]; 629 PetscInt index_col = global_indices_c[cols[j]]; 630 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 631 my_dnz[i] += 1; 632 } else { /* offdiag block */ 633 my_onz[i] += 1; 634 } 635 /* same as before, interchanging rows and cols */ 636 if (issbaij && index_col != index_row) { 637 owner = row_ownership[index_col]; 638 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 639 my_dnz[cols[j]] += 1; 640 } else { 641 my_onz[cols[j]] += 1; 642 } 643 } 644 
  }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz: MAX gives a cheaper (possibly over-counting) estimate,
     SUM gives the exact per-row counts accumulated over all sharing subdomains */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated: a row cannot have more than lcols
     diagonal-block entries nor more than cols-lcols off-diagonal entries */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation: the pointwise counts are consumed first, then compressed
     in place to per-block counts for the (S)BAIJ variants */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assemble the globally unassembled MATIS mat into the parallel matrix *M
   (AIJ, BAIJ or SBAIJ, chosen from the local matrix types and block size).
   With MAT_REUSE_MATRIX, *M must match mat's sizes and block size. */
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor case: the local matrix is already the assembled matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if every process has an SBAIJ local matrix */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must be conforming with mat */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle; convert so that full rows can be inserted */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense arrays are column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* fast path: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore default row orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/* Duplicate a MATIS matrix: recreate the wrapper with the same maps,
   then duplicate the local matrix and attach it */
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr =
MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* A MATIS matrix is Hermitian iff every local matrix is (logical AND across the communicator) */
static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* A MATIS matrix is symmetric iff every local matrix is (logical AND across the communicator) */
static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Release all resources owned by the MATIS wrapper and clear the composed methods */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* csf may alias sf; destroy it (and its work arrays) only when it is distinct */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x, performed implicitly: scatter x to the local space, apply the
   local matrix, then ADD the local results back into the global vector */
static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* in-place: use a temporary so MatMult does not overwrite v2 prematurely */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A'*y, mirror of MatMult_IS with the row/column scatters swapped */
static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* in-place: use a temporary so MatMultTranspose does not overwrite v2 prematurely */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* View each local matrix through a per-process sub-viewer */
static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr =
MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Install row/column local-to-global maps: (re)creates the local matrix,
   the local work vectors, the global-to-local scatters, and the interface
   counter vector (multiplicity of each local dof across subdomains) */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A; its sizes come from the mapping sizes */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix is customizable via the -is_ options prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* same map for rows and columns: share the scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): counter[i] = number of subdomains sharing local dof i,
       obtained by scattering ones to the global vector with ADD and back */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* Map global indices to local ones and insert into the local matrix;
   with IS_GTOLM_MASK indices not owned locally become -1 and are skipped by MatSetValues */
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/* Blocked variant of MatSetValues_IS: maps global block indices to local ones */
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Indices are already local: forward directly to the local matrix */
static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked local indices: forward directly to the local matrix */
static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/* Zero the given LOCAL rows (and optionally columns) of the local matrix;
   when diag != 0, the diagonal is split by the dof multiplicity stored in
   is->counter so that the globally assembled diagonal sums to diag */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.)
{
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      /* divide by the multiplicity so the assembled diagonal entry equals diag */
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/* Common driver for MatZeroRows/MatZeroRowsColumns on MATIS: maps the
   GLOBAL rows to locally owned ones, fixes b = diag*x if requested, then
   broadcasts the zeroed-row flags over the SF so every subdomain touching
   those dofs zeroes them in its local matrix */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* flag the owned rows to be zeroed, then broadcast the flags to the leaves */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_IS"
static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly only involves the local matrix: forward to it */
static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Return a borrowed reference to the local matrix (no reference count change) */
static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.
local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
/* Replace the local matrix, checking that sizes match the previous one (if any).
   Takes a new reference on 'local'; the previous local matrix is released. */
static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
  }
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Forward to the local matrix */
static PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Forward to the local matrix: scaling each local matrix scales the implicit sum */
static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Global diagonal = sum of the local diagonals over the sharing subdomains */
static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Forward to the local matrix */
static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define
__FUNCT__ "MatAXPY_IS"
/*
   MatAXPY_IS - Implements Y = Y + a*X for two MATIS matrices by delegating
   to MatAXPY() on the local (unassembled) matrices y->A and x->A.

   NOTE(review): assumes X and Y have conforming local matrices (i.e. the
   same local-to-global mappings); this is not checked here. Only the type
   of X is verified, and only in debug builds.
*/
static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_IS         *y = (Mat_IS*)Y->data;
  Mat_IS         *x;
#if defined(PETSC_USE_DEBUG)
  PetscBool      ismatis;
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* the cast of X->data below is only meaningful if X is really MATIS */
  ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
  if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
#endif
  x = (Mat_IS*)X->data;
  /* the AXPY is performed on the local matrices only */
  ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/*
   MatGetLocalSubMatrix_IS - Returns a MATIS matrix that acts as a local
   reference to A: it shares A's local matrix and carries local-to-global
   mappings that send the submatrix's local indices (given by row/col, in
   A's local numbering) into A's local numbering. Only insertion and
   assembly operations are supported on the result (see the ops table
   surgery at the end).
*/
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the unselected tail: in debug builds use nrg (out of range, so
     accidental use is caught), otherwise -1 — NOTE(review): presumably so
     insertion routines drop these entries; confirm against MatSetValues
     negative-index semantics */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  /* idxs ownership is transferred to the IS (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
    /* same sentinel scheme as for the rows above */
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* rows and columns use the same selection and mapping: share rl2g */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  /* mark this matrix as a local reference into A (flag consumed elsewhere
     in the MATIS implementation) */
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  /* the submatrix shares A's local matrix rather than copying it */
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops: zero the whole table, then reinstall only the
     insertion/assembly/destroy operations that make sense on a local
     reference */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
   MatCreateIS - Creates a "process" unassembled matrix, assembled on each
       process but not across processes.

   Input Parameters:
+    comm - MPI communicator that will share the matrix
.    bs - block size of the matrix
.    m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.    rmap - local to global map for rows
-    cmap - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors
          used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square.
1615 1616 .seealso: MATIS, MatSetLocalToGlobalMapping() 1617 @*/ 1618 PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A) 1619 { 1620 PetscErrorCode ierr; 1621 1622 PetscFunctionBegin; 1623 if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings"); 1624 ierr = MatCreate(comm,A);CHKERRQ(ierr); 1625 if (bs > 0) { 1626 ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr); 1627 } 1628 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 1629 ierr = MatSetType(*A,MATIS);CHKERRQ(ierr); 1630 if (rmap && cmap) { 1631 ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr); 1632 } else if (!rmap) { 1633 ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr); 1634 } else { 1635 ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr); 1636 } 1637 PetscFunctionReturn(0); 1638 } 1639 1640 /*MC 1641 MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP). 1642 This stores the matrices in globally unassembled form. Each processor 1643 assembles only its local Neumann problem and the parallel matrix vector 1644 product is handled "implicitly". 1645 1646 Operations Provided: 1647 + MatMult() 1648 . MatMultAdd() 1649 . MatMultTranspose() 1650 . MatMultTransposeAdd() 1651 . MatZeroEntries() 1652 . MatSetOption() 1653 . MatZeroRows() 1654 . MatSetValues() 1655 . MatSetValuesBlocked() 1656 . MatSetValuesLocal() 1657 . MatSetValuesBlockedLocal() 1658 . MatScale() 1659 . MatGetDiagonal() 1660 . MatMissingDiagonal() 1661 . MatDuplicate() 1662 . MatCopy() 1663 . MatAXPY() 1664 . MatGetSubMatrix() 1665 . MatGetLocalSubMatrix() 1666 . MatTranspose() 1667 - MatSetLocalToGlobalMapping() 1668 1669 Options Database Keys: 1670 . 
-mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options prefix for the inner matrix are given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor for the MATIS matrix type: allocates the
   Mat_IS implementation data, installs the MATIS function table, and
   composes the MATIS-specific functions queryable by name.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: zero the whole table first so that every operation not
     explicitly installed below is left NULL (i.e. unsupported) */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;
  A->ops->transpose               = MatTranspose_IS;

  /* special MATIS functions, composed so they can be retrieved by name
     (e.g. via PetscObjectQueryFunction) */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}