1 /* 2 Creates a matrix class for using the Neumann-Neumann type preconditioners. 3 This stores the matrices in globally unassembled form. Each processor 4 assembles only its local Neumann problem and the parallel matrix vector 5 product is handled "implicitly". 6 7 Currently this allows for only one subdomain per processor. 8 */ 9 10 #include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/ 11 12 #define MATIS_MAX_ENTRIES_INSERTION 2048 13 14 static PetscErrorCode MatISComputeSF_Private(Mat); 15 16 #undef __FUNCT__ 17 #define __FUNCT__ "MatGetInfo_IS" 18 static PetscErrorCode MatGetInfo_IS(Mat A,MatInfoType flag,MatInfo *ginfo) 19 { 20 Mat_IS *matis = (Mat_IS*)A->data; 21 MatInfo info; 22 PetscReal isend[6],irecv[6]; 23 PetscInt bs; 24 PetscErrorCode ierr; 25 26 PetscFunctionBegin; 27 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 28 if (matis->A->ops->getinfo) { 29 ierr = MatGetInfo(matis->A,MAT_LOCAL,&info);CHKERRQ(ierr); 30 isend[0] = info.nz_used; 31 isend[1] = info.nz_allocated; 32 isend[2] = info.nz_unneeded; 33 isend[3] = info.memory; 34 isend[4] = info.mallocs; 35 } else { 36 isend[0] = 0.; 37 isend[1] = 0.; 38 isend[2] = 0.; 39 isend[3] = 0.; 40 isend[4] = 0.; 41 } 42 isend[5] = matis->A->num_ass; 43 if (flag == MAT_LOCAL) { 44 ginfo->nz_used = isend[0]; 45 ginfo->nz_allocated = isend[1]; 46 ginfo->nz_unneeded = isend[2]; 47 ginfo->memory = isend[3]; 48 ginfo->mallocs = isend[4]; 49 ginfo->assemblies = isend[5]; 50 } else if (flag == MAT_GLOBAL_MAX) { 51 ierr = MPIU_Allreduce(isend,irecv,6,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 52 53 ginfo->nz_used = irecv[0]; 54 ginfo->nz_allocated = irecv[1]; 55 ginfo->nz_unneeded = irecv[2]; 56 ginfo->memory = irecv[3]; 57 ginfo->mallocs = irecv[4]; 58 ginfo->assemblies = irecv[5]; 59 } else if (flag == MAT_GLOBAL_SUM) { 60 ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 61 62 ginfo->nz_used = irecv[0]; 63 ginfo->nz_allocated = irecv[1]; 
64 ginfo->nz_unneeded = irecv[2]; 65 ginfo->memory = irecv[3]; 66 ginfo->mallocs = irecv[4]; 67 ginfo->assemblies = A->num_ass; 68 } 69 ginfo->block_size = bs; 70 ginfo->fill_ratio_given = 0; 71 ginfo->fill_ratio_needed = 0; 72 ginfo->factor_mallocs = 0; 73 PetscFunctionReturn(0); 74 } 75 76 #undef __FUNCT__ 77 #define __FUNCT__ "MatTranspose_IS" 78 PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B) 79 { 80 Mat C,lC,lA; 81 ISLocalToGlobalMapping rl2g,cl2g; 82 PetscBool notset = PETSC_FALSE,cong = PETSC_TRUE; 83 PetscErrorCode ierr; 84 85 PetscFunctionBegin; 86 if (reuse == MAT_REUSE_MATRIX) { 87 PetscBool rcong,ccong; 88 89 ierr = PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);CHKERRQ(ierr); 90 ierr = PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);CHKERRQ(ierr); 91 cong = (PetscBool)(rcong || ccong); 92 } 93 if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) { 94 ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr); 95 } else { 96 ierr = PetscObjectTypeCompare((PetscObject)*B,MATIS,¬set);CHKERRQ(ierr); 97 C = *B; 98 } 99 100 if (!notset) { 101 ierr = MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);CHKERRQ(ierr); 102 ierr = MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));CHKERRQ(ierr); 103 ierr = MatSetType(C,MATIS);CHKERRQ(ierr); 104 ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr); 105 ierr = MatSetLocalToGlobalMapping(C,cl2g,rl2g);CHKERRQ(ierr); 106 } 107 108 /* perform local transposition */ 109 ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr); 110 ierr = MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);CHKERRQ(ierr); 111 ierr = MatISSetLocalMat(C,lC);CHKERRQ(ierr); 112 ierr = MatDestroy(&lC);CHKERRQ(ierr); 113 114 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 115 ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 116 117 if (reuse == MAT_INITIAL_MATRIX || *B != A) { 118 if (!cong) { 119 ierr = MatDestroy(B);CHKERRQ(ierr); 120 } 121 *B = C; 122 } else { 123 ierr = 
MatHeaderMerge(A,&C);CHKERRQ(ierr); 124 } 125 PetscFunctionReturn(0); 126 } 127 128 #undef __FUNCT__ 129 #define __FUNCT__ "MatDiagonalSet_IS" 130 PetscErrorCode MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode) 131 { 132 Mat_IS *is = (Mat_IS*)A->data; 133 PetscErrorCode ierr; 134 135 PetscFunctionBegin; 136 if (!D) { /* this code branch is used by MatShift_IS */ 137 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 138 } else { 139 ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 140 ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 141 } 142 ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr); 143 ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr); 144 PetscFunctionReturn(0); 145 } 146 147 #undef __FUNCT__ 148 #define __FUNCT__ "MatShift_IS" 149 PetscErrorCode MatShift_IS(Mat A,PetscScalar a) 150 { 151 PetscErrorCode ierr; 152 153 PetscFunctionBegin; 154 ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr); 155 PetscFunctionReturn(0); 156 } 157 158 #undef __FUNCT__ 159 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS" 160 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 161 { 162 PetscErrorCode ierr; 163 Mat_IS *is = (Mat_IS*)A->data; 164 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 165 166 PetscFunctionBegin; 167 #if defined(PETSC_USE_DEBUG) 168 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 169 #endif 170 ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 171 ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 172 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 173 PetscFunctionReturn(0); 174 
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
/* Blocked variant of MatSetValuesLocal_SubMat_IS: translate block indices
   through the block local-to-global mappings and insert into the local matrix. */
static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* indices are buffered on the stack: refuse oversized insertions */
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
#endif
  ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscLayoutMapLocal_Private"
/* Map the N (possibly off-process) global indices idxs[] onto the local part
   of the layout 'map'. On output:
     on     - number of input indices owned by this process
     oidxs  - local positions (within [0,map->n)) of those indices (malloc'ed)
     ogidxs - for each owned index, its position in the distributed input
              sequence (malloc'ed); only computed when ogidxs is non-NULL.
   Code adapted from MatZeroRowsMapLocal_Private. */
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks "not referenced"; the logical-or reduce below flips referenced roots to 1 */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* exclusive prefix sum gives this process's starting offset in the global sequence */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/* Extract a parallel submatrix of a MATIS matrix; irow/icol must not contain
   duplicate entries (checked in debug builds). The local IS'es used for the
   extraction are cached in the new matrix for MAT_REUSE_MATRIX calls. */
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;
267 268 PetscFunctionBegin; 269 if (scall == MAT_REUSE_MATRIX) { 270 PetscBool ismatis; 271 272 ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr); 273 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type"); 274 newmatis = (Mat_IS*)(*newmat)->data; 275 if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS"); 276 if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS"); 277 } 278 /* irow and icol may not have duplicate entries */ 279 #if defined(PETSC_USE_DEBUG) 280 ierr = MatCreateVecs(mat,<est,&rtest);CHKERRQ(ierr); 281 ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr); 282 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 283 for (i=0;i<n;i++) { 284 ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 285 } 286 ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr); 287 ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr); 288 ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr); 289 ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr); 290 ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr); 291 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! 
Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 292 ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr); 293 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 294 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 295 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 296 for (i=0;i<n;i++) { 297 ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 298 } 299 ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr); 300 ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr); 301 ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr); 302 ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr); 303 ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr); 304 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 305 ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr); 306 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 307 ierr = VecDestroy(&rtest);CHKERRQ(ierr); 308 ierr = VecDestroy(<est);CHKERRQ(ierr); 309 #endif 310 if (scall == MAT_INITIAL_MATRIX) { 311 Mat_IS *matis = (Mat_IS*)mat->data; 312 ISLocalToGlobalMapping rl2g; 313 IS is; 314 PetscInt *lidxs,*lgidxs,*newgidxs; 315 PetscInt ll,newloc; 316 MPI_Comm comm; 317 318 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 319 ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr); 320 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 321 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 322 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 323 ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 324 /* communicate irow to their owners in the layout */ 325 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 326 ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 327 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 328 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 329 ierr = 
MatISComputeSF_Private(mat);CHKERRQ(ierr); 330 } 331 ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 332 for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1; 333 ierr = PetscFree(lidxs);CHKERRQ(ierr); 334 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 335 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 336 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 337 for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++; 338 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 339 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 340 for (i=0,newloc=0;i<matis->sf_nleaves;i++) 341 if (matis->sf_leafdata[i]) { 342 lidxs[newloc] = i; 343 newgidxs[newloc++] = matis->sf_leafdata[i]-1; 344 } 345 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 346 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 347 ierr = ISDestroy(&is);CHKERRQ(ierr); 348 /* local is to extract local submatrix */ 349 newmatis = (Mat_IS*)(*newmat)->data; 350 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr); 351 if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */ 352 PetscBool cong; 353 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr); 354 if (cong) mat->congruentlayouts = 1; 355 else mat->congruentlayouts = 0; 356 } 357 if (mat->congruentlayouts && irow == icol) { 358 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr); 359 ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr); 360 newmatis->getsub_cis = newmatis->getsub_ris; 361 } else { 362 ISLocalToGlobalMapping cl2g; 363 364 /* communicate icol to their owners in the layout */ 365 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 366 ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 
367 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 368 ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 369 for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1; 370 ierr = PetscFree(lidxs);CHKERRQ(ierr); 371 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 372 ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 373 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 374 for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++; 375 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 376 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 377 for (i=0,newloc=0;i<matis->csf_nleaves;i++) 378 if (matis->csf_leafdata[i]) { 379 lidxs[newloc] = i; 380 newgidxs[newloc++] = matis->csf_leafdata[i]-1; 381 } 382 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 383 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 384 ierr = ISDestroy(&is);CHKERRQ(ierr); 385 /* local is to extract local submatrix */ 386 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr); 387 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 388 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 389 } 390 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 391 } else { 392 ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr); 393 } 394 ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr); 395 newmatis = (Mat_IS*)(*newmat)->data; 396 ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr); 397 if (scall == MAT_INITIAL_MATRIX) { 398 ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr); 399 ierr = MatDestroy(&newlocmat);CHKERRQ(ierr); 400 } 401 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 402 ierr = 
MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 403 PetscFunctionReturn(0); 404 } 405 406 #undef __FUNCT__ 407 #define __FUNCT__ "MatCopy_IS" 408 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str) 409 { 410 Mat_IS *a = (Mat_IS*)A->data,*b; 411 PetscBool ismatis; 412 PetscErrorCode ierr; 413 414 PetscFunctionBegin; 415 ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr); 416 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented"); 417 b = (Mat_IS*)B->data; 418 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 419 PetscFunctionReturn(0); 420 } 421 422 #undef __FUNCT__ 423 #define __FUNCT__ "MatMissingDiagonal_IS" 424 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d) 425 { 426 Vec v; 427 const PetscScalar *array; 428 PetscInt i,n; 429 PetscErrorCode ierr; 430 431 PetscFunctionBegin; 432 *missing = PETSC_FALSE; 433 ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr); 434 ierr = MatGetDiagonal(A,v);CHKERRQ(ierr); 435 ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr); 436 ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr); 437 for (i=0;i<n;i++) if (array[i] == 0.) 
break;
  ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  if (i != n) *missing = PETSC_TRUE; /* loop broke early: a zero diagonal entry was found at local row i */
  if (d) {
    *d = -1;
    if (*missing) {
      PetscInt rstart;
      ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
      *d = i+rstart;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/* Build the star forests used to communicate between the parallel layout of B
   (roots) and the local subdomain matrices (leaves), and allocate the
   associated root/leaf work arrays. A separate SF for the columns is created
   only when the row and column mappings differ; otherwise the column fields
   alias the row ones (note: destruction code must account for the aliasing). */
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else { /* square-ish case: reuse the row SF and its buffers for columns */
    matis->csf          = matis->sf;
    matis->csf_nleaves  = matis->sf_nleaves;
    matis->csf_nroots   = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op for non-MATIS types) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/* MATIS implementation of MatISSetPreallocation: the per-row counts given in
   the parallel (assembled) numbering are summed (diag + offdiag) on the roots
   and broadcast to the local numbering via the SF, then used to (over)estimate
   the preallocation of the local matrix for the AIJ/BAIJ/SBAIJ formats. */
static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* clip the estimate at the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr =
MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* derive blocked counts in-place (the preallocation call matching the actual
     local matrix type takes effect; the others are no-ops) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangular part: subtract the block row index */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Compute and set the preallocation of the assembled matrix B obtained from
   the MATIS matrix A. Each process counts, for every row of its local matrix,
   the contributions falling in the diagonal/off-diagonal blocks of the owning
   process; the counts are then combined across subdomains through the SF
   (summed, or max-reduced when maxreduce is true, which gives a cheaper but
   possibly insufficient estimate for values inserted with INSERT_VALUES). */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) { /* make the (upper triangular only) SBAIJ rows visible to MatGetRow */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on mat_ranges refers to the COLUMN ranges (diag-block test below) */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices: count the (symmetric) full pattern */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else if (matis->A->ops->getrowij) { /* fast path: walk the CSR arrays directly */
    const PetscInt *ii,*jj,*jptr;
    PetscBool      done;
    ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    jptr = jj;
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) { /* count the symmetric entry not stored by SBAIJ */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
  } else { /* loop over rows and use MatGetRow */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation: the call matching B's actual type takes effect */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat
*M) 752 { 753 Mat_IS *matis = (Mat_IS*)(mat->data); 754 Mat local_mat; 755 /* info on mat */ 756 PetscInt bs,rows,cols,lrows,lcols; 757 PetscInt local_rows,local_cols; 758 PetscBool isdense,issbaij,isseqaij; 759 PetscMPIInt nsubdomains; 760 /* values insertion */ 761 PetscScalar *array; 762 /* work */ 763 PetscErrorCode ierr; 764 765 PetscFunctionBegin; 766 /* get info from mat */ 767 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr); 768 if (nsubdomains == 1) { 769 if (reuse == MAT_INITIAL_MATRIX) { 770 ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr); 771 } else { 772 ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 773 } 774 PetscFunctionReturn(0); 775 } 776 ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr); 777 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 778 ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr); 779 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 780 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 781 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 782 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 783 784 if (reuse == MAT_INITIAL_MATRIX) { 785 MatType new_mat_type; 786 PetscBool issbaij_red; 787 788 /* determining new matrix type */ 789 ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 790 if (issbaij_red) { 791 new_mat_type = MATSBAIJ; 792 } else { 793 if (bs>1) { 794 new_mat_type = MATBAIJ; 795 } else { 796 new_mat_type = MATAIJ; 797 } 798 } 799 800 ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr); 801 ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr); 802 ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr); 803 ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr); 804 ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr); 805 } 
else { 806 PetscInt mbs,mrows,mcols,mlrows,mlcols; 807 /* some checks */ 808 ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 809 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 810 ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr); 811 if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows); 812 if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 813 if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows); 814 if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols); 815 if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs); 816 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 817 } 818 819 if (issbaij) { 820 ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr); 821 } else { 822 ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr); 823 local_mat = matis->A; 824 } 825 826 /* Set values */ 827 ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr); 828 if (isdense) { /* special case for dense local matrices */ 829 PetscInt i,*dummy_rows,*dummy_cols; 830 831 if (local_rows != local_cols) { 832 ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr); 833 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 834 for (i=0;i<local_cols;i++) dummy_cols[i] = i; 835 } else { 836 ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr); 837 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 838 dummy_cols = dummy_rows; 839 } 840 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 841 ierr = 
MatDenseGetArray(local_mat,&array);CHKERRQ(ierr); 842 ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr); 843 ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr); 844 if (dummy_rows != dummy_cols) { 845 ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr); 846 } else { 847 ierr = PetscFree(dummy_rows);CHKERRQ(ierr); 848 } 849 } else if (isseqaij) { 850 PetscInt i,nvtxs,*xadj,*adjncy; 851 PetscBool done; 852 853 ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 854 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__); 855 ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr); 856 for (i=0;i<nvtxs;i++) { 857 ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr); 858 } 859 ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 860 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__); 861 ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr); 862 } else { /* very basic values insertion for all other matrix types */ 863 PetscInt i; 864 865 for (i=0;i<local_rows;i++) { 866 PetscInt j; 867 const PetscInt *local_indices_cols; 868 869 ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 870 ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr); 871 ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 872 } 873 } 874 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 875 ierr = MatDestroy(&local_mat);CHKERRQ(ierr); 876 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 877 if (isdense) { 878 ierr = 
MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr); 879 } 880 PetscFunctionReturn(0); 881 } 882 883 #undef __FUNCT__ 884 #define __FUNCT__ "MatISGetMPIXAIJ" 885 /*@ 886 MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format 887 888 Input Parameter: 889 . mat - the matrix (should be of type MATIS) 890 . reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 891 892 Output Parameter: 893 . newmat - the matrix in AIJ format 894 895 Level: developer 896 897 Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested. 898 899 .seealso: MATIS 900 @*/ 901 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat) 902 { 903 PetscErrorCode ierr; 904 905 PetscFunctionBegin; 906 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 907 PetscValidLogicalCollectiveEnum(mat,reuse,2); 908 PetscValidPointer(newmat,3); 909 if (reuse != MAT_INITIAL_MATRIX) { 910 PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3); 911 PetscCheckSameComm(mat,1,*newmat,3); 912 if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix"); 913 } 914 ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr); 915 PetscFunctionReturn(0); 916 } 917 918 #undef __FUNCT__ 919 #define __FUNCT__ "MatDuplicate_IS" 920 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat) 921 { 922 PetscErrorCode ierr; 923 Mat_IS *matis = (Mat_IS*)(mat->data); 924 PetscInt bs,m,n,M,N; 925 Mat B,localmat; 926 927 PetscFunctionBegin; 928 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 929 ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr); 930 ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr); 931 ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr); 932 ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr); 933 ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr); 934 ierr = MatDestroy(&localmat);CHKERRQ(ierr); 935 ierr = 
MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* A MATIS matrix is reported Hermitian when every local matrix is Hermitian:
   test locally, then combine with a logical AND across the communicator. */
static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* Symmetry test for MATIS: symmetric iff each local matrix is symmetric
   (reduced with a logical AND over all processes). */
static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Destroys everything owned by the MATIS data structure: local matrix,
   scatters, work vectors, index sets, star forests and their buffers, then
   removes the composed methods from the object. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* the column SF may alias the row SF; destroy it only when distinct */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x for MATIS: gather x into the local work vector, apply the local
   Neumann matrix, then accumulate the local products back into the global y. */
static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory; y must be zeroed first because
     contributions from different subdomains are ADDed */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
/* v3 = v2 + A*v1; a temporary is needed when v3 aliases v2 because
   MatMult_IS zeroes its output vector before accumulating into it. */
static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A'*y for MATIS, mirroring MatMult_IS with the row and column scatters
   exchanged. */
static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector y into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
/* v3 = v2 + A'*v1; same aliasing precaution as MatMultAdd_IS. */
static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* Views each process' local matrix through a sequential sub-viewer. */
static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr =
PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the row/column local-to-global mappings on a MATIS matrix:
   destroys any previously created data, sets the layouts, creates the
   sequential local matrix sized by the mappings, and (unless this is a local
   reference submatrix) builds the global-to-local scatters, the local work
   vectors, and the interface multiplicity counter used by MatZeroRows. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: its dimensions are the sizes of the mappings */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* local matrix options live under the "<prefix>is_" options prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      /* distinct column map: build a separate column scatter */
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* square symmetric layout: share the row scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): counter[i] = number of subdomains
       sharing local dof i, obtained by scattering ones back and forth */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* MatSetValues for MATIS: maps the global row/column indices to local ones
   (unmappable indices are masked to -1 and thus skipped by MatSetValues) and
   inserts into the local matrix.  At most MATIS_MAX_ENTRIES_INSERTION
   rows/columns can be passed per call because the translated indices are
   stored on the stack. */
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/* Blocked analogue of MatSetValues_IS: translates global block indices to
   local block indices and forwards to MatSetValuesBlocked on the local
   matrix.  Same stack-buffer limit as MatSetValues_IS. */
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Local indices are already local: insert straight into the local matrix. */
static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked analogue of MatSetValuesLocal_IS. */
static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/* Zeroes the given LOCAL rows (and optionally columns) of the local matrix.
   n == 0 flags the pure Neumann case.  A nonzero diag is divided by the
   interface counter so the globally subassembled diagonal sums to diag. */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.)
{ 1289 const PetscScalar *array; 1290 ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr); 1291 for (i=0; i<n; i++) { 1292 ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr); 1293 } 1294 ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr); 1295 } 1296 ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1297 ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1298 } 1299 PetscFunctionReturn(0); 1300 } 1301 1302 #undef __FUNCT__ 1303 #define __FUNCT__ "MatZeroRowsColumns_Private_IS" 1304 static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns) 1305 { 1306 Mat_IS *matis = (Mat_IS*)A->data; 1307 PetscInt nr,nl,len,i; 1308 PetscInt *lrows; 1309 PetscErrorCode ierr; 1310 1311 PetscFunctionBegin; 1312 #if defined(PETSC_USE_DEBUG) 1313 if (columns || diag != 0. || (x && b)) { 1314 PetscBool cong; 1315 ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr); 1316 if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS"); 1317 if (!cong && diag != 0.) 
SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS"); 1318 if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent"); 1319 } 1320 #endif 1321 /* get locally owned rows */ 1322 ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr); 1323 /* fix right hand side if needed */ 1324 if (x && b) { 1325 const PetscScalar *xx; 1326 PetscScalar *bb; 1327 1328 ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr); 1329 ierr = VecGetArray(b, &bb);CHKERRQ(ierr); 1330 for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]]; 1331 ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr); 1332 ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr); 1333 } 1334 /* get rows associated to the local matrices */ 1335 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 1336 ierr = MatISComputeSF_Private(A);CHKERRQ(ierr); 1337 } 1338 ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr); 1339 ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr); 1340 ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr); 1341 for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1; 1342 ierr = PetscFree(lrows);CHKERRQ(ierr); 1343 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 1344 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 1345 ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr); 1346 for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i; 1347 ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr); 1348 ierr = PetscFree(lrows);CHKERRQ(ierr); 1349 PetscFunctionReturn(0); 1350 } 1351 1352 #undef __FUNCT__ 1353 #define __FUNCT__ "MatZeroRows_IS" 1354 static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec 
x,Vec b) 1355 { 1356 PetscErrorCode ierr; 1357 1358 PetscFunctionBegin; 1359 ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr); 1360 PetscFunctionReturn(0); 1361 } 1362 1363 #undef __FUNCT__ 1364 #define __FUNCT__ "MatZeroRowsColumns_IS" 1365 static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 1366 { 1367 PetscErrorCode ierr; 1368 1369 PetscFunctionBegin; 1370 ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr); 1371 PetscFunctionReturn(0); 1372 } 1373 1374 #undef __FUNCT__ 1375 #define __FUNCT__ "MatAssemblyBegin_IS" 1376 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type) 1377 { 1378 Mat_IS *is = (Mat_IS*)A->data; 1379 PetscErrorCode ierr; 1380 1381 PetscFunctionBegin; 1382 ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr); 1383 PetscFunctionReturn(0); 1384 } 1385 1386 #undef __FUNCT__ 1387 #define __FUNCT__ "MatAssemblyEnd_IS" 1388 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type) 1389 { 1390 Mat_IS *is = (Mat_IS*)A->data; 1391 PetscErrorCode ierr; 1392 1393 PetscFunctionBegin; 1394 ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr); 1395 PetscFunctionReturn(0); 1396 } 1397 1398 #undef __FUNCT__ 1399 #define __FUNCT__ "MatISGetLocalMat_IS" 1400 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local) 1401 { 1402 Mat_IS *is = (Mat_IS*)mat->data; 1403 1404 PetscFunctionBegin; 1405 *local = is->A; 1406 PetscFunctionReturn(0); 1407 } 1408 1409 #undef __FUNCT__ 1410 #define __FUNCT__ "MatISGetLocalMat" 1411 /*@ 1412 MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix. 1413 1414 Input Parameter: 1415 . mat - the matrix 1416 1417 Output Parameter: 1418 . 
local - the local matrix 1419 1420 Level: advanced 1421 1422 Notes: 1423 This can be called if you have precomputed the nonzero structure of the 1424 matrix and want to provide it to the inner matrix object to improve the performance 1425 of the MatSetValues() operation. 1426 1427 .seealso: MATIS 1428 @*/ 1429 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local) 1430 { 1431 PetscErrorCode ierr; 1432 1433 PetscFunctionBegin; 1434 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 1435 PetscValidPointer(local,2); 1436 ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr); 1437 PetscFunctionReturn(0); 1438 } 1439 1440 #undef __FUNCT__ 1441 #define __FUNCT__ "MatISSetLocalMat_IS" 1442 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local) 1443 { 1444 Mat_IS *is = (Mat_IS*)mat->data; 1445 PetscInt nrows,ncols,orows,ocols; 1446 PetscErrorCode ierr; 1447 1448 PetscFunctionBegin; 1449 if (is->A) { 1450 ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr); 1451 ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr); 1452 if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols); 1453 } 1454 ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr); 1455 ierr = MatDestroy(&is->A);CHKERRQ(ierr); 1456 is->A = local; 1457 PetscFunctionReturn(0); 1458 } 1459 1460 #undef __FUNCT__ 1461 #define __FUNCT__ "MatISSetLocalMat" 1462 /*@ 1463 MatISSetLocalMat - Replace the local matrix stored inside a MATIS object. 1464 1465 Input Parameter: 1466 . mat - the matrix 1467 . local - the local matrix 1468 1469 Output Parameter: 1470 1471 Level: advanced 1472 1473 Notes: 1474 This can be called if you have precomputed the local matrix and 1475 want to provide it to the matrix object MATIS. 
1476 1477 .seealso: MATIS 1478 @*/ 1479 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local) 1480 { 1481 PetscErrorCode ierr; 1482 1483 PetscFunctionBegin; 1484 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 1485 PetscValidHeaderSpecific(local,MAT_CLASSID,2); 1486 ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr); 1487 PetscFunctionReturn(0); 1488 } 1489 1490 #undef __FUNCT__ 1491 #define __FUNCT__ "MatZeroEntries_IS" 1492 static PetscErrorCode MatZeroEntries_IS(Mat A) 1493 { 1494 Mat_IS *a = (Mat_IS*)A->data; 1495 PetscErrorCode ierr; 1496 1497 PetscFunctionBegin; 1498 ierr = MatZeroEntries(a->A);CHKERRQ(ierr); 1499 PetscFunctionReturn(0); 1500 } 1501 1502 #undef __FUNCT__ 1503 #define __FUNCT__ "MatScale_IS" 1504 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a) 1505 { 1506 Mat_IS *is = (Mat_IS*)A->data; 1507 PetscErrorCode ierr; 1508 1509 PetscFunctionBegin; 1510 ierr = MatScale(is->A,a);CHKERRQ(ierr); 1511 PetscFunctionReturn(0); 1512 } 1513 1514 #undef __FUNCT__ 1515 #define __FUNCT__ "MatGetDiagonal_IS" 1516 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v) 1517 { 1518 Mat_IS *is = (Mat_IS*)A->data; 1519 PetscErrorCode ierr; 1520 1521 PetscFunctionBegin; 1522 /* get diagonal of the local matrix */ 1523 ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr); 1524 1525 /* scatter diagonal back into global vector */ 1526 ierr = VecSet(v,0);CHKERRQ(ierr); 1527 ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1528 ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1529 PetscFunctionReturn(0); 1530 } 1531 1532 #undef __FUNCT__ 1533 #define __FUNCT__ "MatSetOption_IS" 1534 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg) 1535 { 1536 Mat_IS *a = (Mat_IS*)A->data; 1537 PetscErrorCode ierr; 1538 1539 PetscFunctionBegin; 1540 ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1541 PetscFunctionReturn(0); 1542 } 1543 1544 #undef __FUNCT__ 1545 #define 
__FUNCT__ "MatAXPY_IS" 1546 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str) 1547 { 1548 Mat_IS *y = (Mat_IS*)Y->data; 1549 Mat_IS *x; 1550 #if defined(PETSC_USE_DEBUG) 1551 PetscBool ismatis; 1552 #endif 1553 PetscErrorCode ierr; 1554 1555 PetscFunctionBegin; 1556 #if defined(PETSC_USE_DEBUG) 1557 ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr); 1558 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS"); 1559 #endif 1560 x = (Mat_IS*)X->data; 1561 ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr); 1562 PetscFunctionReturn(0); 1563 } 1564 1565 #undef __FUNCT__ 1566 #define __FUNCT__ "MatGetLocalSubMatrix_IS" 1567 static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat) 1568 { 1569 Mat lA; 1570 Mat_IS *matis; 1571 ISLocalToGlobalMapping rl2g,cl2g; 1572 IS is; 1573 const PetscInt *rg,*rl; 1574 PetscInt nrg; 1575 PetscInt N,M,nrl,i,*idxs; 1576 PetscErrorCode ierr; 1577 1578 PetscFunctionBegin; 1579 ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr); 1580 ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr); 1581 ierr = ISGetIndices(row,&rl);CHKERRQ(ierr); 1582 ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr); 1583 #if defined(PETSC_USE_DEBUG) 1584 for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg); 1585 #endif 1586 ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr); 1587 /* map from [0,nrl) to row */ 1588 for (i=0;i<nrl;i++) idxs[i] = rl[i]; 1589 #if defined(PETSC_USE_DEBUG) 1590 for (i=nrl;i<nrg;i++) idxs[i] = nrg; 1591 #else 1592 for (i=nrl;i<nrg;i++) idxs[i] = -1; 1593 #endif 1594 ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr); 1595 ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr); 1596 ierr = 
ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
    /* unused slots: out-of-range marker in debug builds, -1 otherwise */
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* same IS and same mapping: reuse the row map for the columns */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  /* mark as a local reference: the submatrix shares A's local matrix */
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops: the local reference only supports destruction,
     local SetValues and assembly */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A  - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors
          used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square.
.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  if (bs > 0) {
    ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  /* if only one mapping is given, the matrix is square: use it for both
     rows and columns */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatSetValues()
.  MatSetValuesBlocked()
.  MatSetValuesLocal()
.  MatSetValuesBlockedLocal()
.  MatScale()
.  MatGetDiagonal()
.  MatMissingDiagonal()
.  MatDuplicate()
.  MatCopy()
.  MatAXPY()
.  MatGetSubMatrix()
.  MatGetLocalSubMatrix()
.  MatTranspose()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.
-mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options prefix for the inner matrix are given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* MATIS constructor: allocates the Mat_IS context, installs the
   type-specific operation table, and composes the MATIS-specific
   functions queried via PetscUseMethod/PetscTryMethod */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;
  A->ops->transpose               = MatTranspose_IS;
  A->ops->getinfo                 = MatGetInfo_IS;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}