/*
    Creates a matrix class for using the Neumann-Neumann type preconditioners.
    This stores the matrices in globally unassembled form. Each processor
    assembles only its local Neumann problem and the parallel matrix vector
    product is handled "implicitly".

    Currently this allows for only one subdomain per processor.
*/

#include <petsc/private/matisimpl.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petsc/private/sfimpl.h>
#include <petsc/private/vecimpl.h>
#include <petsc/private/hashseti.h>

/* Chunk size used when flushing buffered insertions (see the SetValues paths below) */
#define MATIS_MAX_ENTRIES_INSERTION 2048
static PetscErrorCode MatSetValuesLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
static PetscErrorCode MatISSetUpScatters_Private(Mat);

/* Destructor for the "_MatIS_PtAP" container attached to the product matrix C:
   frees the cached local prolongators and the row/column index sets stored in
   the MatISPtAP context (see MatPtAPSymbolic_IS_XAIJ for where they are built). */
static PetscErrorCode MatISContainerDestroyPtAP_Private(void **ptr)
{
  MatISPtAP ptap = (MatISPtAP)*ptr;

  PetscFunctionBegin;
  /* two local prolongators were extracted when the row map differed from the column map (ris1 set) */
  PetscCall(MatDestroySubMatrices(ptap->ris1 ? 2 : 1, &ptap->lP));
  PetscCall(ISDestroy(&ptap->cis0));
  PetscCall(ISDestroy(&ptap->cis1));
  PetscCall(ISDestroy(&ptap->ris0));
  PetscCall(ISDestroy(&ptap->ris1));
  PetscCall(PetscFree(ptap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Numeric phase of C = P^T * A * P when A is MATIS and P is an assembled (X)AIJ matrix.
   Retrieves the symbolic information cached on C by MatPtAPSymbolic_IS_XAIJ,
   (re)extracts the local pieces of P, and performs the triple product on the
   local (unassembled) matrix of A, storing the result as the local matrix of C. */
static PetscErrorCode MatPtAPNumeric_IS_XAIJ(Mat A, Mat P, Mat C)
{
  MatISPtAP      ptap;
  Mat_IS        *matis = (Mat_IS *)A->data;
  Mat            lA, lC;
  MatReuse       reuse;
  IS             ris[2], cis[2];
  PetscContainer c;
  PetscInt       n;

  PetscFunctionBegin;
  PetscCall(PetscObjectQuery((PetscObject)C, "_MatIS_PtAP", (PetscObject *)&c));
  PetscCheck(c, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Missing PtAP information");
  PetscCall(PetscContainerGetPointer(c, (void **)&ptap));
  ris[0] = ptap->ris0;
  ris[1] = ptap->ris1;
  cis[0] = ptap->cis0;
  cis[1] = ptap->cis1;
  /* ris1 non-NULL means A's row and column l2g maps differ: two local prolongators needed */
  n     = ptap->ris1 ? 2 : 1;
  reuse = ptap->lP ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
  PetscCall(MatCreateSubMatrices(P, n, ris, cis, reuse, &ptap->lP));

  PetscCall(MatISGetLocalMat(A, &lA));
  PetscCall(MatISGetLocalMat(C, &lC));
  if (ptap->ris1) { /* unsymmetric A mapping */
    Mat lPt;

    /* lC = (lP[1])^T * lA * lP[0] */
    PetscCall(MatTranspose(ptap->lP[1], MAT_INITIAL_MATRIX, &lPt));
    PetscCall(MatMatMatMult(lPt, lA, ptap->lP[0], reuse, ptap->fill, &lC));
    if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)A, "_MatIS_PtAP_l2l", (PetscObject)lPt));
    PetscCall(MatDestroy(&lPt));
  } else {
    /* symmetric maps: plain local PtAP */
    PetscCall(MatPtAP(lA, ptap->lP[0], reuse, ptap->fill, &lC));
    if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP_l2l", (PetscObject)ptap->lP[0]));
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    /* MatISSetLocalMat takes its own reference; drop ours */
    PetscCall(MatISSetLocalMat(C, lC));
    PetscCall(MatDestroy(&lC));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Returns, in cis, the IS (block indices, global numbering) of the columns of PT
   that contain at least one entry above a small tolerance. Supports SEQAIJ,
   SEQBAIJ, MPIAIJ and MPIBAIJ input. */
static PetscErrorCode MatGetNonzeroColumnsLocal_Private(Mat PT, IS *cis)
{
  Mat             Po, Pd;
  IS              zd, zo;
  const PetscInt *garray;
  PetscInt       *aux, i, bs;
  PetscInt        dc, stc, oc, ctd, cto;
  PetscBool       ismpiaij, ismpibaij, isseqaij, isseqbaij;
  MPI_Comm        comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(PT, MAT_CLASSID, 1);
  PetscAssertPointer(cis, 2);
  PetscCall(PetscObjectGetComm((PetscObject)PT, &comm));
  bs = 1;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIBAIJ, &ismpibaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATSEQAIJ, &isseqaij));
  PetscCall(PetscObjectTypeCompare((PetscObject)PT, MATSEQBAIJ, &isseqbaij));
  if (isseqaij || isseqbaij) {
    /* sequential: no off-diagonal part, no column map */
    Pd     = PT;
    Po     = NULL;
    garray = NULL;
  } else if (ismpiaij) {
    PetscCall(MatMPIAIJGetSeqAIJ(PT, &Pd, &Po, &garray));
  } else if (ismpibaij) {
    PetscCall(MatMPIBAIJGetSeqBAIJ(PT, &Pd, &Po, &garray));
    PetscCall(MatGetBlockSize(PT, &bs));
  } else SETERRQ(comm, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)PT)->type_name);

  /* identify any null columns in Pd or Po */
  /* We use a tolerance comparison since it may happen that, with geometric multigrid,
     some of the columns are not really zero, but very close to */
  zo = zd = NULL;
  if (Po) PetscCall(MatFindNonzeroRowsOrCols_Basic(Po, PETSC_TRUE, PETSC_SMALL, &zo));
  PetscCall(MatFindNonzeroRowsOrCols_Basic(Pd, PETSC_TRUE, PETSC_SMALL, &zd));

  PetscCall(MatGetLocalSize(PT, NULL, &dc));
  PetscCall(MatGetOwnershipRangeColumn(PT, &stc, NULL));
  if (Po) PetscCall(MatGetLocalSize(Po, NULL, &oc));
  else oc = 0;
  /* aux holds the block-column indices of the surviving (nonzero) columns: diagonal part first, then off-diagonal */
  PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
  if (zd) {
    const PetscInt *idxs;
    PetscInt        nz;

    /* this will throw an error if bs is not valid */
    PetscCall(ISSetBlockSize(zd, bs));
    PetscCall(ISGetLocalSize(zd, &nz));
    PetscCall(ISGetIndices(zd, &idxs));
    ctd = nz / bs;
    /* shift local diagonal columns to global numbering, converted to block indices */
    for (i = 0; i < ctd; i++) aux[i] = (idxs[bs * i] + stc) / bs;
    PetscCall(ISRestoreIndices(zd, &idxs));
  } else {
    /* no zero columns found in the diagonal part: keep them all */
    ctd = dc / bs;
    for (i = 0; i < ctd; i++) aux[i] = i + stc / bs;
  }
  if (zo) {
    const PetscInt *idxs;
    PetscInt        nz;

    /* this will throw an error if bs is not valid */
    PetscCall(ISSetBlockSize(zo, bs));
    PetscCall(ISGetLocalSize(zo, &nz));
    PetscCall(ISGetIndices(zo, &idxs));
    cto = nz / bs;
    /* off-diagonal columns are mapped to global numbering through garray */
    for (i = 0; i < cto; i++) aux[i + ctd] = garray[idxs[bs * i] / bs];
    PetscCall(ISRestoreIndices(zo, &idxs));
  } else {
    cto = oc / bs;
    for (i = 0; i < cto; i++) aux[i + ctd] = garray[i];
  }
  /* the IS takes ownership of aux */
  PetscCall(ISCreateBlock(comm, bs, ctd + cto, aux, PETSC_OWN_POINTER, cis));
  PetscCall(ISDestroy(&zd));
  PetscCall(ISDestroy(&zo));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase of C = P^T * A * P with A of type MATIS and P assembled (X)AIJ.
   Configures C as MATIS, extracts the rows of P matching A's local (column) map
   into ptap->ris0/cis0 (and ris1/cis1 when A's row map differs), builds C's
   local-to-global mappings from the nonzero columns of the local prolongators,
   and caches everything in a "_MatIS_PtAP" container on C for the numeric phase. */
static PetscErrorCode MatPtAPSymbolic_IS_XAIJ(Mat A, Mat P, PetscReal fill, Mat C)
{
  Mat                    PT, lA;
  MatISPtAP              ptap;
  ISLocalToGlobalMapping Crl2g, Ccl2g, rl2g, cl2g;
  PetscContainer         c;
  MatType                lmtype;
  const PetscInt        *garray;
  PetscInt               ibs, N, dc;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(MatSetType(C, MATIS));
  PetscCall(MatISGetLocalMat(A, &lA));
  PetscCall(MatGetType(lA, &lmtype));
  /* C's local matrix uses the same type as A's local matrix */
  PetscCall(MatISSetLocalMatType(C, lmtype));
  PetscCall(MatGetSize(P, NULL, &N));
  PetscCall(MatGetLocalSize(P, NULL, &dc));
  PetscCall(MatSetSizes(C, dc, dc, N, N));
  /* Not sure about this
  PetscCall(MatGetBlockSizes(P,NULL,&ibs));
  PetscCall(MatSetBlockSize(*C,ibs));
  */

  /* container-held context; destroyed with C via MatISContainerDestroyPtAP_Private */
  PetscCall(PetscNew(&ptap));
  PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &c));
  PetscCall(PetscContainerSetPointer(c, ptap));
  PetscCall(PetscContainerSetCtxDestroy(c, MatISContainerDestroyPtAP_Private));
  PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP", (PetscObject)c));
  PetscCall(PetscContainerDestroy(&c));
  ptap->fill = fill;

  PetscCall(MatISGetLocalToGlobalMapping(A, &rl2g, &cl2g));

  /* rows of P needed locally = global indices of A's column l2g map */
  PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &ibs));
  PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &N));
  PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &garray));
  PetscCall(ISCreateBlock(comm, ibs, N / ibs, garray, PETSC_COPY_VALUES, &ptap->ris0));
  PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &garray));

  /* compress out the all-zero columns of the local prolongator; those define C's column map */
  PetscCall(MatCreateSubMatrix(P, ptap->ris0, NULL, MAT_INITIAL_MATRIX, &PT));
  PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis0));
  PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis0, &Ccl2g));
  PetscCall(MatDestroy(&PT));

  Crl2g = NULL;
  if (rl2g != cl2g) { /* unsymmetric A mapping */
    PetscBool same, lsame = PETSC_FALSE;
    PetscInt  N1, ibs1;

    PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &N1));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &ibs1));
    PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &garray));
    /* NOTE(review): uses ibs (column map block size), not ibs1, for the block IS — confirm intended */
    PetscCall(ISCreateBlock(comm, ibs, N1 / ibs, garray, PETSC_COPY_VALUES, &ptap->ris1));
    PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &garray));
    if (ibs1 == ibs && N1 == N) { /* check if the l2gmaps are the same */
      const PetscInt *i1, *i2;

      /* NOTE(review): no matching ISBlockRestoreIndices here — confirm against upstream */
      PetscCall(ISBlockGetIndices(ptap->ris0, &i1));
      PetscCall(ISBlockGetIndices(ptap->ris1, &i2));
      PetscCall(PetscArraycmp(i1, i2, N, &lsame));
    }
    /* all processes must agree before dropping the second prolongator (collective) */
    PetscCallMPI(MPIU_Allreduce(&lsame, &same, 1, MPIU_BOOL, MPI_LAND, comm));
    if (same) {
      PetscCall(ISDestroy(&ptap->ris1));
    } else {
      PetscCall(MatCreateSubMatrix(P, ptap->ris1, NULL, MAT_INITIAL_MATRIX, &PT));
      PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis1));
      PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis1, &Crl2g));
      PetscCall(MatDestroy(&PT));
    }
  }
  /* Not sure about this
  if (!Crl2g) {
    PetscCall(MatGetBlockSize(C,&ibs));
    PetscCall(ISLocalToGlobalMappingSetBlockSize(Ccl2g,ibs));
  }
  */
  /* symmetric case reuses the column map for the rows */
  PetscCall(MatSetLocalToGlobalMapping(C, Crl2g ? Crl2g : Ccl2g, Ccl2g));
  PetscCall(ISLocalToGlobalMappingDestroy(&Crl2g));
  PetscCall(ISLocalToGlobalMappingDestroy(&Ccl2g));

  C->ops->ptapnumeric = MatPtAPNumeric_IS_XAIJ;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MatProduct glue: run the IS-specific symbolic phase, then route the numeric
   phase through the generic MatProductNumeric_PtAP dispatcher. */
static PetscErrorCode MatProductSymbolic_PtAP_IS_XAIJ(Mat C)
{
  Mat_Product *product = C->product;
  Mat          A = product->A, P = product->B;
  PetscReal    fill = product->fill;

  PetscFunctionBegin;
  PetscCall(MatPtAPSymbolic_IS_XAIJ(A, P, fill, C));
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Install the PtAP symbolic hook for MATIS x (X)AIJ products */
static PetscErrorCode MatProductSetFromOptions_IS_XAIJ_PtAP(Mat C)
{
  PetscFunctionBegin;
  C->ops->productsymbolic = MatProductSymbolic_PtAP_IS_XAIJ;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Entry point called by the MatProduct framework: only PtAP is supported here */
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_IS_XAIJ(Mat C)
{
  Mat_Product *product = C->product;

  PetscFunctionBegin;
  if (product->type == MATPRODUCT_PtAP) PetscCall(MatProductSetFromOptions_IS_XAIJ_PtAP(C));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destructor for the container holding the local row/column field ISes
   (MatISLocalFields) attached to a MATIS matrix. */
static PetscErrorCode MatISContainerDestroyFields_Private(void **ptr)
{
  MatISLocalFields lf = (MatISLocalFields)*ptr;
  PetscInt         i;

  PetscFunctionBegin;
  for (i = 0; i < lf->nr; i++) PetscCall(ISDestroy(&lf->rf[i]));
  for (i = 0; i < lf->nc; i++) PetscCall(ISDestroy(&lf->cf[i]));
  PetscCall(PetscFree2(lf->rf, lf->cf));
  PetscCall(PetscFree(lf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a sequential (X)AIJ matrix to MATIS: the single-process case, where
   the local matrix is simply a copy (or, for in-place/reuse, the matrix itself)
   and the l2g maps are identities (block-strided when bs > 1). */
static PetscErrorCode MatConvert_SeqXAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat B, lB;

  PetscFunctionBegin;
  if (reuse != MAT_REUSE_MATRIX) {
    ISLocalToGlobalMapping rl2g, cl2g;
    PetscInt               bs;
    IS                     is;

    PetscCall(MatGetBlockSize(A, &bs));
    /* identity row map, expressed as a block IS when bs > 1 */
    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->n / bs, 0, 1, &is));
    if (bs > 1) {
      IS       is2;
      PetscInt i, *aux;

      PetscCall(ISGetLocalSize(is, &i));
      PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
      PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
      PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
      PetscCall(ISDestroy(&is));
      is = is2;
    }
    PetscCall(ISSetIdentity(is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    /* identity column map, same construction as the row map above */
    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->n / bs, 0, 1, &is));
    if (bs > 1) {
      IS       is2;
      PetscInt i, *aux;

      PetscCall(ISGetLocalSize(is, &i));
      PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
      PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
      PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
      PetscCall(ISDestroy(&is));
      is = is2;
    }
    PetscCall(ISSetIdentity(is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    PetscCall(MatCreateIS(PetscObjectComm((PetscObject)A), bs, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N, rl2g, cl2g, &B));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
    /* the local matrix is a value copy of A */
    PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &lB));
    if (reuse == MAT_INITIAL_MATRIX) *newmat = B;
  } else {
    /* reuse: keep the existing MATIS shell, use A itself as the local matrix (extra reference taken) */
    B = *newmat;
    PetscCall(PetscObjectReference((PetscObject)A));
    lB = A;
  }
  PetscCall(MatISSetLocalMat(B, lB));
  PetscCall(MatDestroy(&lB));
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* When disassembling an assembled matrix into MATIS form, entries shared by
   several subdomains would be counted multiple times on re-assembly. This
   routine divides each local entry (i,j) by the number of subdomains that share
   BOTH endpoints i and j, computed from the l2g map's node neighbor info, so
   that the implicit parallel sum reproduces the original operator. */
static PetscErrorCode MatISScaleDisassembling_Private(Mat A)
{
  Mat_IS          *matis = (Mat_IS *)A->data;
  PetscScalar     *aa;
  const PetscInt  *ii, *jj;
  PetscInt         i, n, m;
  PetscInt        *ecount, **eneighs;
  PetscBool        flg;

  PetscFunctionBegin;
  PetscCall(MatGetRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
  /* ecount[i]: number of processes sharing local node i; eneighs[i]: their ranks */
  PetscCall(ISLocalToGlobalMappingGetNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
  PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %" PetscInt_FMT " != %" PetscInt_FMT, m, n);
  PetscCall(MatSeqAIJGetArray(matis->A, &aa));
  for (i = 0; i < n; i++) {
    if (ecount[i] > 1) { /* rows owned by a single process need no scaling */
      PetscInt j;

      for (j = ii[i]; j < ii[i + 1]; j++) {
        PetscInt  i2 = jj[j], p, p2;
        PetscReal scal = 0.0;

        /* count processes sharing both row node i and column node i2 */
        for (p = 0; p < ecount[i]; p++) {
          for (p2 = 0; p2 < ecount[i2]; p2++) {
            if (eneighs[i][p] == eneighs[i2][p2]) {
              scal += 1.0;
              break;
            }
          }
        }
        if (scal) aa[j] /= scal;
      }
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
  PetscCall(MatSeqAIJRestoreArray(matis->A, &aa));
  PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Strategies for building the local-to-global map when disassembling an
   assembled matrix into MATIS form (option -mat_is_disassemble_l2g_type) */
typedef enum {
  MAT_IS_DISASSEMBLE_L2G_NATURAL, /* locally owned rows plus the off-diagonal columns */
  MAT_IS_DISASSEMBLE_L2G_MAT,     /* reuse the map already attached to the matrix */
  MAT_IS_DISASSEMBLE_L2G_ND       /* nested-dissection based overlapping decomposition */
} MatISDisassemblel2gType;

/* Computes the local-to-global mapping used to disassemble an assembled
   MPIAIJ/MPIBAIJ matrix A into MATIS form, according to the strategy selected
   at the options database (see MatISDisassemblel2gType above). */
static PetscErrorCode MatMPIXAIJComputeLocalToGlobalMapping_Private(Mat A, ISLocalToGlobalMapping *l2g)
{
  Mat                     Ad, Ao;
  IS                      is, ndmap, ndsub;
  MPI_Comm                comm;
  const PetscInt         *garray, *ndmapi;
  PetscInt                bs, i, cnt, nl, *ncount, *ndmapc;
  PetscBool               ismpiaij, ismpibaij;
  const char *const       MatISDisassemblel2gTypes[] = {"NATURAL", "MAT", "ND", "MatISDisassemblel2gType", "MAT_IS_DISASSEMBLE_L2G_", NULL};
  MatISDisassemblel2gType mode = MAT_IS_DISASSEMBLE_L2G_NATURAL;
  MatPartitioning         part;
  PetscSF                 sf;
  PetscObject             dm;

  PetscFunctionBegin;
  PetscOptionsBegin(PetscObjectComm((PetscObject)A), ((PetscObject)A)->prefix, "MatIS l2g disassembling options", "Mat");
  PetscCall(PetscOptionsEnum("-mat_is_disassemble_l2g_type", "Type of local-to-global mapping to be used for disassembling", "MatISDisassemblel2gType", MatISDisassemblel2gTypes, (PetscEnum)mode, (PetscEnum *)&mode, NULL));
  PetscOptionsEnd();
  if (mode == MAT_IS_DISASSEMBLE_L2G_MAT) {
    /* trust the mapping already attached to A */
    PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
  PetscCall(MatGetBlockSize(A, &bs));
  switch (mode) {
  case MAT_IS_DISASSEMBLE_L2G_ND:
    /* nested-dissection ordering of A, then one layer of overlap around each part */
    PetscCall(MatPartitioningCreate(comm, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part, ((PetscObject)A)->prefix));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApplyND(part, &ndmap));
    PetscCall(MatPartitioningDestroy(&part));
    PetscCall(ISBuildTwoSided(ndmap, NULL, &ndsub));
    PetscCall(MatMPIAIJSetUseScalableIncreaseOverlap(A, PETSC_TRUE));
    PetscCall(MatIncreaseOverlap(A, 1, &ndsub, 1));
    PetscCall(ISLocalToGlobalMappingCreateIS(ndsub, l2g));

    /* it may happen that a separator node is not properly shared */
    PetscCall(ISLocalToGlobalMappingGetNodeInfo(*l2g, &nl, &ncount, NULL));
    PetscCall(PetscSFCreate(comm, &sf));
    PetscCall(ISLocalToGlobalMappingGetIndices(*l2g, &garray));
    PetscCall(PetscSFSetGraphLayout(sf, A->rmap, nl, NULL, PETSC_OWN_POINTER, garray));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(*l2g, &garray));
    PetscCall(PetscCalloc1(A->rmap->n, &ndmapc));
    /* gather, at the owner of each row, the sharing count of that node */
    PetscCall(PetscSFReduceBegin(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
    PetscCall(PetscSFReduceEnd(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
    PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(*l2g, NULL, &ncount, NULL));
    PetscCall(ISGetIndices(ndmap, &ndmapi));
    /* ndmapi[i] < 0 marks separator nodes; count those shared by fewer than two parts */
    for (i = 0, cnt = 0; i < A->rmap->n; i++)
      if (ndmapi[i] < 0 && ndmapc[i] < 2) cnt++;

    /* collective: every rank must learn whether anyone found isolated separators */
    PetscCallMPI(MPIU_Allreduce(&cnt, &i, 1, MPIU_INT, MPI_MAX, comm));
    if (i) { /* we detected isolated separator nodes */
      Mat                    A2, A3;
      IS                    *workis, is2;
      PetscScalar           *vals;
      PetscInt               gcnt = i, *dnz, *onz, j, *lndmapi;
      ISLocalToGlobalMapping ll2g;
      PetscBool              flg;
      const PetscInt        *ii, *jj;

      /* communicate global id of separators */
      MatPreallocateBegin(comm, A->rmap->n, A->cmap->n, dnz, onz);
      for (i = 0, cnt = 0; i < A->rmap->n; i++) dnz[i] = ndmapi[i] < 0 ? i + A->rmap->rstart : -1;

      PetscCall(PetscMalloc1(nl, &lndmapi));
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));

      /* compute adjacency of isolated separators node */
      PetscCall(PetscMalloc1(gcnt, &workis));
      for (i = 0, cnt = 0; i < A->rmap->n; i++) {
        if (ndmapi[i] < 0 && ndmapc[i] < 2) PetscCall(ISCreateStride(comm, 1, i + A->rmap->rstart, 1, &workis[cnt++]));
      }
      /* pad with empty ISes so every rank participates gcnt times in the collective calls below */
      for (i = cnt; i < gcnt; i++) PetscCall(ISCreateStride(comm, 0, 0, 1, &workis[i]));
      for (i = 0; i < gcnt; i++) {
        PetscCall(PetscObjectSetName((PetscObject)workis[i], "ISOLATED"));
        PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
      }

      /* no communications since all the ISes correspond to locally owned rows */
      PetscCall(MatIncreaseOverlap(A, gcnt, workis, 1));

      /* end communicate global id of separators */
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));

      /* communicate new layers : create a matrix and transpose it */
      PetscCall(PetscArrayzero(dnz, A->rmap->n));
      PetscCall(PetscArrayzero(onz, A->rmap->n));
      for (i = 0, j = 0; i < A->rmap->n; i++) {
        if (ndmapi[i] < 0 && ndmapc[i] < 2) {
          const PetscInt *idxs;
          PetscInt        s;

          PetscCall(ISGetLocalSize(workis[j], &s));
          PetscCall(ISGetIndices(workis[j], &idxs));
          PetscCall(MatPreallocateSet(i + A->rmap->rstart, s, idxs, dnz, onz));
          j++;
        }
      }
      PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);

      for (i = 0; i < gcnt; i++) {
        PetscCall(PetscObjectSetName((PetscObject)workis[i], "EXTENDED"));
        PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
      }

      /* j = maximum number of nonzeros in any row of the auxiliary matrix */
      for (i = 0, j = 0; i < A->rmap->n; i++) j = PetscMax(j, dnz[i] + onz[i]);
      PetscCall(PetscMalloc1(j, &vals));
      for (i = 0; i < j; i++) vals[i] = 1.0;

      /* boolean adjacency matrix: row = isolated separator, cols = its extended neighborhood */
      PetscCall(MatCreate(comm, &A2));
      PetscCall(MatSetType(A2, MATMPIAIJ));
      PetscCall(MatSetSizes(A2, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
      PetscCall(MatMPIAIJSetPreallocation(A2, 0, dnz, 0, onz));
      PetscCall(MatSetOption(A2, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
      for (i = 0, j = 0; i < A2->rmap->n; i++) {
        PetscInt        row = i + A2->rmap->rstart, s = dnz[i] + onz[i];
        const PetscInt *idxs;

        if (s) {
          PetscCall(ISGetIndices(workis[j], &idxs));
          PetscCall(MatSetValues(A2, 1, &row, s, idxs, vals, INSERT_VALUES));
          PetscCall(ISRestoreIndices(workis[j], &idxs));
          j++;
        }
      }
      PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);
      PetscCall(PetscFree(vals));
      PetscCall(MatAssemblyBegin(A2, MAT_FINAL_ASSEMBLY));
      PetscCall(MatAssemblyEnd(A2, MAT_FINAL_ASSEMBLY));
      PetscCall(MatTranspose(A2, MAT_INPLACE_MATRIX, &A2));

      /* extract submatrix corresponding to the coupling "owned separators" x "isolated separators" */
      for (i = 0, j = 0; i < nl; i++)
        if (lndmapi[i] >= 0) lndmapi[j++] = lndmapi[i];
      PetscCall(ISCreateGeneral(comm, j, lndmapi, PETSC_USE_POINTER, &is));
      PetscCall(MatMPIAIJGetLocalMatCondensed(A2, MAT_INITIAL_MATRIX, &is, NULL, &A3));
      PetscCall(ISDestroy(&is));
      PetscCall(MatDestroy(&A2));

      /* extend local to global map to include connected isolated separators */
      PetscCall(PetscObjectQuery((PetscObject)A3, "_petsc_GetLocalMatCondensed_iscol", (PetscObject *)&is));
      PetscCheck(is, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing column map");
      PetscCall(ISLocalToGlobalMappingCreateIS(is, &ll2g));
      PetscCall(MatGetRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ii[i], jj, PETSC_COPY_VALUES, &is));
      PetscCall(MatRestoreRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
      PetscCall(ISLocalToGlobalMappingApplyIS(ll2g, is, &is2));
      PetscCall(ISDestroy(&is));
      PetscCall(ISLocalToGlobalMappingDestroy(&ll2g));

      /* add new nodes to the local-to-global map */
      PetscCall(ISLocalToGlobalMappingDestroy(l2g));
      PetscCall(ISExpand(ndsub, is2, &is));
      PetscCall(ISDestroy(&is2));
      PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
      PetscCall(ISDestroy(&is));

      PetscCall(MatDestroy(&A3));
      PetscCall(PetscFree(lndmapi));
      MatPreallocateEnd(dnz, onz);
      for (i = 0; i < gcnt; i++) PetscCall(ISDestroy(&workis[i]));
      PetscCall(PetscFree(workis));
    }
    PetscCall(ISRestoreIndices(ndmap, &ndmapi));
    PetscCall(PetscSFDestroy(&sf));
    PetscCall(PetscFree(ndmapc));
    PetscCall(ISDestroy(&ndmap));
    PetscCall(ISDestroy(&ndsub));
    PetscCall(ISLocalToGlobalMappingSetBlockSize(*l2g, bs));
    PetscCall(ISLocalToGlobalMappingViewFromOptions(*l2g, NULL, "-mat_is_nd_l2g_view"));
    break;
  case MAT_IS_DISASSEMBLE_L2G_NATURAL:
    PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_dm", &dm));
    if (dm) { /* if a matrix comes from a DM, most likely we can use the l2gmap if any */
      PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
      PetscCall(PetscObjectReference((PetscObject)*l2g));
      if (*l2g) PetscFunctionReturn(PETSC_SUCCESS);
    }
    if (ismpiaij) {
      PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
    } else if (ismpibaij) {
      PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
    } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
    if (A->rmap->n) {
      PetscInt dc, oc, stc, *aux;

      PetscCall(MatGetLocalSize(Ad, NULL, &dc));
      PetscCall(MatGetLocalSize(Ao, NULL, &oc));
      PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present");
      PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
      /* local map = owned (diagonal) block columns followed by ghost (off-diagonal) columns */
      PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
      for (i = 0; i < dc / bs; i++) aux[i] = i + stc / bs;
      /* MPIAIJ garray is in scalar indices, MPIBAIJ garray already in block indices */
      for (i = 0; i < oc / bs; i++) aux[i + dc / bs] = (ismpiaij ? garray[i * bs] / bs : garray[i]);
      PetscCall(ISCreateBlock(comm, bs, (dc + oc) / bs, aux, PETSC_OWN_POINTER, &is));
    } else {
      PetscCall(ISCreateBlock(comm, 1, 0, NULL, PETSC_OWN_POINTER, &is));
    }
    PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
    PetscCall(ISDestroy(&is));
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Unsupported l2g disassembling type %d", mode);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert an assembled (MPI)AIJ/BAIJ matrix A to MATIS (disassembly).
   Dispatches to MatConvert_SeqXAIJ_IS on one process; otherwise either reuses
   an existing MATIS layout (REUSE / square congruent case) or builds the local
   matrix by merging the diagonal and off-diagonal CSR blocks of A. */
PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                    lA, Ad, Ao, B = NULL;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  MPI_Comm               comm;
  void                  *ptrs[2];
  const char            *names[2] = {"_convert_csr_aux", "_convert_csr_data"};
  const PetscInt        *garray;
  PetscScalar           *dd, *od, *aa, *data;
  const PetscInt        *di, *dj, *oi, *oj;
  const PetscInt        *odi, *odj, *ooi, *ooj;
  PetscInt              *aux, *ii, *jj;
  PetscInt               rbs, cbs, lc, dr, dc, oc, str, stc, nnz, i, jd, jo, cum;
  PetscBool              flg, ismpiaij, ismpibaij, was_inplace = PETSC_FALSE, cong;
  PetscMPIInt            size;
  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) {
    /* uniprocessor: delegate to the sequential conversion */
    PetscCall(MatConvert_SeqXAIJ_IS(A, type, reuse, newmat));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(MatGetBlockSizes(A, &rbs, &cbs));
  PetscCall(MatHasCongruentLayouts(A, &cong));
  if (reuse != MAT_REUSE_MATRIX && cong && rbs == cbs) {
    /* square matrix with congruent layouts: compute a (possibly overlapping) l2g map,
       create the MATIS shell, then fall through to the REUSE path to fill it */
    PetscCall(MatMPIXAIJComputeLocalToGlobalMapping_Private(A, &rl2g));
    PetscCall(MatCreate(comm, &B));
    PetscCall(MatSetType(B, MATIS));
    PetscCall(MatSetSizes(B, A->rmap->n, A->rmap->n, A->rmap->N, A->rmap->N));
    PetscCall(MatSetLocalToGlobalMapping(B, rl2g, rl2g));
    PetscCall(MatSetBlockSizes(B, rbs, rbs));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    if (reuse == MAT_INPLACE_MATRIX) was_inplace = PETSC_TRUE;
    reuse = MAT_REUSE_MATRIX;
  }
  if (reuse == MAT_REUSE_MATRIX) {
    Mat            *newlA, lA;
    IS              rows, cols;
    const PetscInt *ridx, *cidx;
    PetscInt        nr, nc;

    if (!B) B = *newmat;
    /* extract from A the overlapped submatrix described by B's l2g maps */
    PetscCall(MatISGetLocalToGlobalMapping(B, &rl2g, &cl2g));
    PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &ridx));
    PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &cidx));
    PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &nr));
    PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &nc));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &rbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &cbs));
    PetscCall(ISCreateBlock(comm, rbs, nr / rbs, ridx, PETSC_USE_POINTER, &rows));
    if (rl2g != cl2g) {
      PetscCall(ISCreateBlock(comm, cbs, nc / cbs, cidx, PETSC_USE_POINTER, &cols));
    } else {
      PetscCall(PetscObjectReference((PetscObject)rows));
      cols = rows;
    }
    PetscCall(MatISGetLocalMat(B, &lA));
    PetscCall(MatCreateSubMatrices(A, 1, &rows, &cols, MAT_INITIAL_MATRIX, &newlA));
    PetscCall(MatConvert(newlA[0], MATSEQAIJ, MAT_INPLACE_MATRIX, &newlA[0]));
    PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &ridx));
    PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &cidx));
    PetscCall(ISDestroy(&rows));
    PetscCall(ISDestroy(&cols));
    if (!lA->preallocated) { /* first time */
      PetscCall(MatDuplicate(newlA[0], MAT_COPY_VALUES, &lA));
      PetscCall(MatISSetLocalMat(B, lA));
      PetscCall(PetscObjectDereference((PetscObject)lA));
    }
    PetscCall(MatCopy(newlA[0], lA, SAME_NONZERO_PATTERN));
    PetscCall(MatDestroySubMatrices(1, &newlA));
    /* rescale shared entries so the implicit parallel sum reproduces A */
    PetscCall(MatISScaleDisassembling_Private(B));
    PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
    if (was_inplace) PetscCall(MatHeaderReplace(A, &B));
    else *newmat = B;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* general case, just compress out the column space */
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
  if (ismpiaij) {
    cbs = 1; /* We cannot guarantee the off-process matrix will respect the column block size */
    PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
  } else if (ismpibaij) {
    PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
    /* work on AIJ copies of the BAIJ blocks; destroyed once merged (see below) */
    PetscCall(MatConvert(Ad, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ad));
    PetscCall(MatConvert(Ao, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ao));
  } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
  PetscCall(MatSeqAIJGetArray(Ad, &dd));
  PetscCall(MatSeqAIJGetArray(Ao, &od));

  /* access relevant information from MPIAIJ */
  PetscCall(MatGetOwnershipRange(A, &str, NULL));
  PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
  PetscCall(MatGetLocalSize(Ad, &dr, &dc));
  PetscCall(MatGetLocalSize(Ao, NULL, &oc));
  PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present");

  PetscCall(MatGetRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &di, &dj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
  PetscCall(MatGetRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &oi, &oj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
  nnz = di[dr] + oi[dr];
  /* store original pointers to be restored later */
  odi = di;
  odj = dj;
  ooi = oi;
  ooj = oj;

  /* generate l2g maps for rows and cols */
  PetscCall(ISCreateStride(comm, dr / rbs, str / rbs, 1, &is));
  if (rbs > 1) {
    IS is2;

    PetscCall(ISGetLocalSize(is, &i));
    PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
    PetscCall(ISCreateBlock(comm, rbs, i, aux, PETSC_COPY_VALUES, &is2));
    PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
    PetscCall(ISDestroy(&is));
    is = is2;
  }
  PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
  PetscCall(ISDestroy(&is));
  if (dr) {
    /* columns: owned (diagonal) block columns first, then the ghost columns from garray */
    PetscCall(PetscMalloc1((dc + oc) / cbs, &aux));
    for (i = 0; i < dc / cbs; i++) aux[i] = i + stc / cbs;
    for (i = 0; i < oc / cbs; i++) aux[i + dc / cbs] = garray[i];
    PetscCall(ISCreateBlock(comm, cbs, (dc + oc) / cbs, aux, PETSC_OWN_POINTER, &is));
    lc = dc + oc;
  } else {
    PetscCall(ISCreateBlock(comm, cbs, 0, NULL, PETSC_OWN_POINTER, &is));
    lc = 0;
  }
  PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
  PetscCall(ISDestroy(&is));

  /* create MATIS object */
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, dr, dc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatSetType(B, MATIS));
  PetscCall(MatSetBlockSizes(B, rbs, cbs));
  PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
  PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
  PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

  /* merge local matrices: interleave each row's diagonal entries (local column
     numbering) with its off-diagonal entries (shifted by dc) into one CSR */
  PetscCall(PetscMalloc1(nnz + dr + 1, &aux));
  PetscCall(PetscMalloc1(nnz, &data));
  ii  = aux;            /* merged row offsets */
  jj  = aux + dr + 1;   /* merged column indices */
  aa  = data;           /* merged values */
  *ii = *(di++) + *(oi++);
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    for (; jd < *di; jd++) {
      *jj++ = *dj++;
      *aa++ = *dd++;
    }
    for (; jo < *oi; jo++) {
      *jj++ = *oj++ + dc; /* ghost columns live after the dc local ones */
      *aa++ = *od++;
    }
    *(++ii) = *(di++) + *(oi++);
  }
  /* remaining (empty) rows all end at nnz */
  for (; cum < dr; cum++) *(++ii) = nnz;

  PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &odi, &odj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
  PetscCall(MatRestoreRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &ooi, &ooj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
  PetscCall(MatSeqAIJRestoreArray(Ad, &dd));
  PetscCall(MatSeqAIJRestoreArray(Ao, &od));

  ii = aux;
  jj = aux + dr + 1;
  aa = data;
  /* lA borrows ii/jj/aa; ownership handled by the containers below */
  PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, lc, ii, jj, aa, &lA));

  /* create containers to destroy the data */
  ptrs[0] = aux;
  ptrs[1] = data;
  for (i = 0; i < 2; i++) PetscCall(PetscObjectContainerCompose((PetscObject)lA, names[i], ptrs[i], PetscCtxDestroyDefault));
  if (ismpibaij) { /* destroy converted local matrices */
    PetscCall(MatDestroy(&Ad));
    PetscCall(MatDestroy(&Ao));
  }

  /* finalize matrix */
  PetscCall(MatISSetLocalMat(B, lA));
  PetscCall(MatDestroy(&lA));
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &B));
  else *newmat = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATNEST whose blocks are all MATIS (or transposes of MATIS) into a
   single MATIS matrix. (Definition continues past this view.) */
PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                  **nest, *snest, **rnest, lA, B;
  IS                    *iscol, *isrow, *islrow, *islcol;
  ISLocalToGlobalMapping rl2g, cl2g;
  MPI_Comm               comm;
  PetscInt              *lr, *lc, *l2gidxs;
  PetscInt               i, j, nr, nc, rbs, cbs;
  PetscBool              convert, lreuse, *istrans;
  PetscBool3             allow_repeated =
PETSC_BOOL3_UNKNOWN; 811 812 PetscFunctionBegin; 813 PetscCall(MatNestGetSubMats(A, &nr, &nc, &nest)); 814 lreuse = PETSC_FALSE; 815 rnest = NULL; 816 if (reuse == MAT_REUSE_MATRIX) { 817 PetscBool ismatis, isnest; 818 819 PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis)); 820 PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_USER, "Cannot reuse matrix of type %s", ((PetscObject)*newmat)->type_name); 821 PetscCall(MatISGetLocalMat(*newmat, &lA)); 822 PetscCall(PetscObjectTypeCompare((PetscObject)lA, MATNEST, &isnest)); 823 if (isnest) { 824 PetscCall(MatNestGetSubMats(lA, &i, &j, &rnest)); 825 lreuse = (PetscBool)(i == nr && j == nc); 826 if (!lreuse) rnest = NULL; 827 } 828 } 829 PetscCall(PetscObjectGetComm((PetscObject)A, &comm)); 830 PetscCall(PetscCalloc2(nr, &lr, nc, &lc)); 831 PetscCall(PetscCalloc6(nr, &isrow, nc, &iscol, nr, &islrow, nc, &islcol, nr * nc, &snest, nr * nc, &istrans)); 832 PetscCall(MatNestGetISs(A, isrow, iscol)); 833 for (i = 0; i < nr; i++) { 834 for (j = 0; j < nc; j++) { 835 PetscBool ismatis, sallow; 836 PetscInt l1, l2, lb1, lb2, ij = i * nc + j; 837 838 /* Null matrix pointers are allowed in MATNEST */ 839 if (!nest[i][j]) continue; 840 841 /* Nested matrices should be of type MATIS */ 842 PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATTRANSPOSEVIRTUAL, &istrans[ij])); 843 if (istrans[ij]) { 844 Mat T, lT; 845 PetscCall(MatTransposeGetMat(nest[i][j], &T)); 846 PetscCall(PetscObjectTypeCompare((PetscObject)T, MATIS, &ismatis)); 847 PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! 
Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") (transposed) is not of type MATIS", i, j); 848 PetscCall(MatISGetAllowRepeated(T, &sallow)); 849 PetscCall(MatISGetLocalMat(T, &lT)); 850 PetscCall(MatCreateTranspose(lT, &snest[ij])); 851 } else { 852 PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATIS, &ismatis)); 853 PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") is not of type MATIS", i, j); 854 PetscCall(MatISGetAllowRepeated(nest[i][j], &sallow)); 855 PetscCall(MatISGetLocalMat(nest[i][j], &snest[ij])); 856 } 857 if (allow_repeated == PETSC_BOOL3_UNKNOWN) allow_repeated = PetscBoolToBool3(sallow); 858 PetscCheck(sallow == PetscBool3ToBool(allow_repeated), comm, PETSC_ERR_SUP, "Cannot mix repeated and non repeated maps"); 859 860 /* Check compatibility of local sizes */ 861 PetscCall(MatGetSize(snest[ij], &l1, &l2)); 862 PetscCall(MatGetBlockSizes(snest[ij], &lb1, &lb2)); 863 if (!l1 || !l2) continue; 864 PetscCheck(!lr[i] || l1 == lr[i], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lr[i], l1); 865 PetscCheck(!lc[j] || l2 == lc[j], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! 
Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lc[j], l2); 866 lr[i] = l1; 867 lc[j] = l2; 868 869 /* check compatibility for local matrix reusage */ 870 if (rnest && !rnest[i][j] != !snest[ij]) lreuse = PETSC_FALSE; 871 } 872 } 873 874 if (PetscDefined(USE_DEBUG)) { 875 /* Check compatibility of l2g maps for rows */ 876 for (i = 0; i < nr; i++) { 877 rl2g = NULL; 878 for (j = 0; j < nc; j++) { 879 PetscInt n1, n2; 880 881 if (!nest[i][j]) continue; 882 if (istrans[i * nc + j]) { 883 Mat T; 884 885 PetscCall(MatTransposeGetMat(nest[i][j], &T)); 886 PetscCall(MatISGetLocalToGlobalMapping(T, NULL, &cl2g)); 887 } else { 888 PetscCall(MatISGetLocalToGlobalMapping(nest[i][j], &cl2g, NULL)); 889 } 890 PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1)); 891 if (!n1) continue; 892 if (!rl2g) { 893 rl2g = cl2g; 894 } else { 895 const PetscInt *idxs1, *idxs2; 896 PetscBool same; 897 898 PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2)); 899 PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, n1, n2); 900 PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1)); 901 PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2)); 902 PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same)); 903 PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1)); 904 PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2)); 905 PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! 
Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap", i, j); 906 } 907 } 908 } 909 /* Check compatibility of l2g maps for columns */ 910 for (i = 0; i < nc; i++) { 911 rl2g = NULL; 912 for (j = 0; j < nr; j++) { 913 PetscInt n1, n2; 914 915 if (!nest[j][i]) continue; 916 if (istrans[j * nc + i]) { 917 Mat T; 918 919 PetscCall(MatTransposeGetMat(nest[j][i], &T)); 920 PetscCall(MatISGetLocalToGlobalMapping(T, &cl2g, NULL)); 921 } else { 922 PetscCall(MatISGetLocalToGlobalMapping(nest[j][i], NULL, &cl2g)); 923 } 924 PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1)); 925 if (!n1) continue; 926 if (!rl2g) { 927 rl2g = cl2g; 928 } else { 929 const PetscInt *idxs1, *idxs2; 930 PetscBool same; 931 932 PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2)); 933 PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, j, i, n1, n2); 934 PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1)); 935 PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2)); 936 PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same)); 937 PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1)); 938 PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2)); 939 PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! 
Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap", j, i); 940 } 941 } 942 } 943 } 944 945 B = NULL; 946 if (reuse != MAT_REUSE_MATRIX) { 947 PetscInt stl; 948 949 /* Create l2g map for the rows of the new matrix and index sets for the local MATNEST */ 950 for (i = 0, stl = 0; i < nr; i++) stl += lr[i]; 951 PetscCall(PetscMalloc1(stl, &l2gidxs)); 952 for (i = 0, stl = 0; i < nr; i++) { 953 Mat usedmat; 954 Mat_IS *matis; 955 const PetscInt *idxs; 956 957 /* local IS for local NEST */ 958 PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i])); 959 960 /* l2gmap */ 961 j = 0; 962 usedmat = nest[i][j]; 963 while (!usedmat && j < nc - 1) usedmat = nest[i][++j]; 964 PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid row mat"); 965 966 if (istrans[i * nc + j]) { 967 Mat T; 968 PetscCall(MatTransposeGetMat(usedmat, &T)); 969 usedmat = T; 970 } 971 matis = (Mat_IS *)usedmat->data; 972 PetscCall(ISGetIndices(isrow[i], &idxs)); 973 if (istrans[i * nc + j]) { 974 PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 975 PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 976 } else { 977 PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 978 PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 979 } 980 PetscCall(ISRestoreIndices(isrow[i], &idxs)); 981 stl += lr[i]; 982 } 983 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &rl2g)); 984 985 /* Create l2g map for columns of the new matrix and index sets for the local MATNEST */ 986 for (i = 0, stl = 0; i < nc; i++) stl += lc[i]; 987 PetscCall(PetscMalloc1(stl, &l2gidxs)); 988 for (i = 0, stl = 0; i < nc; i++) { 989 Mat usedmat; 990 Mat_IS *matis; 991 const PetscInt *idxs; 992 993 /* local IS for local NEST */ 994 PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i])); 995 996 /* l2gmap */ 997 j = 0; 998 
usedmat = nest[j][i]; 999 while (!usedmat && j < nr - 1) usedmat = nest[++j][i]; 1000 PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid column mat"); 1001 if (istrans[j * nc + i]) { 1002 Mat T; 1003 PetscCall(MatTransposeGetMat(usedmat, &T)); 1004 usedmat = T; 1005 } 1006 matis = (Mat_IS *)usedmat->data; 1007 PetscCall(ISGetIndices(iscol[i], &idxs)); 1008 if (istrans[j * nc + i]) { 1009 PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 1010 PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 1011 } else { 1012 PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 1013 PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE)); 1014 } 1015 PetscCall(ISRestoreIndices(iscol[i], &idxs)); 1016 stl += lc[i]; 1017 } 1018 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &cl2g)); 1019 1020 /* Create MATIS */ 1021 PetscCall(MatCreate(comm, &B)); 1022 PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N)); 1023 PetscCall(MatGetBlockSizes(A, &rbs, &cbs)); 1024 PetscCall(MatSetBlockSizes(B, rbs, cbs)); 1025 PetscCall(MatSetType(B, MATIS)); 1026 PetscCall(MatISSetLocalMatType(B, MATNEST)); 1027 PetscCall(MatISSetAllowRepeated(B, PetscBool3ToBool(allow_repeated))); 1028 { /* hack : avoid setup of scatters */ 1029 Mat_IS *matis = (Mat_IS *)B->data; 1030 matis->islocalref = PETSC_TRUE; 1031 } 1032 PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g)); 1033 PetscCall(ISLocalToGlobalMappingDestroy(&rl2g)); 1034 PetscCall(ISLocalToGlobalMappingDestroy(&cl2g)); 1035 PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA)); 1036 PetscCall(MatNestSetVecType(lA, VECNEST)); 1037 for (i = 0; i < nr * nc; i++) { 1038 if (istrans[i]) PetscCall(MatDestroy(&snest[i])); 1039 } 1040 PetscCall(MatISSetLocalMat(B, lA)); 1041 PetscCall(MatDestroy(&lA)); 1042 { /* hack : setup of scatters done here */ 
1043 Mat_IS *matis = (Mat_IS *)B->data; 1044 1045 matis->islocalref = PETSC_FALSE; 1046 PetscCall(MatISSetUpScatters_Private(B)); 1047 } 1048 PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); 1049 PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); 1050 if (reuse == MAT_INPLACE_MATRIX) { 1051 PetscCall(MatHeaderReplace(A, &B)); 1052 } else { 1053 *newmat = B; 1054 } 1055 } else { 1056 if (lreuse) { 1057 PetscCall(MatISGetLocalMat(*newmat, &lA)); 1058 for (i = 0; i < nr; i++) { 1059 for (j = 0; j < nc; j++) { 1060 if (snest[i * nc + j]) { 1061 PetscCall(MatNestSetSubMat(lA, i, j, snest[i * nc + j])); 1062 if (istrans[i * nc + j]) PetscCall(MatDestroy(&snest[i * nc + j])); 1063 } 1064 } 1065 } 1066 } else { 1067 PetscInt stl; 1068 for (i = 0, stl = 0; i < nr; i++) { 1069 PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i])); 1070 stl += lr[i]; 1071 } 1072 for (i = 0, stl = 0; i < nc; i++) { 1073 PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i])); 1074 stl += lc[i]; 1075 } 1076 PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA)); 1077 for (i = 0; i < nr * nc; i++) { 1078 if (istrans[i]) PetscCall(MatDestroy(&snest[i])); 1079 } 1080 PetscCall(MatISSetLocalMat(*newmat, lA)); 1081 PetscCall(MatDestroy(&lA)); 1082 } 1083 PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY)); 1084 PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY)); 1085 } 1086 1087 /* Create local matrix in MATNEST format */ 1088 convert = PETSC_FALSE; 1089 PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)A)->prefix, "-mat_is_convert_local_nest", &convert, NULL)); 1090 if (convert) { 1091 Mat M; 1092 MatISLocalFields lf; 1093 1094 PetscCall(MatISGetLocalMat(*newmat, &lA)); 1095 PetscCall(MatConvert(lA, MATAIJ, MAT_INITIAL_MATRIX, &M)); 1096 PetscCall(MatISSetLocalMat(*newmat, M)); 1097 PetscCall(MatDestroy(&M)); 1098 1099 /* attach local fields to the matrix */ 1100 PetscCall(PetscNew(&lf)); 1101 PetscCall(PetscMalloc2(nr, &lf->rf, nc, 
&lf->cf)); 1102 for (i = 0; i < nr; i++) { 1103 PetscInt n, st; 1104 1105 PetscCall(ISGetLocalSize(islrow[i], &n)); 1106 PetscCall(ISStrideGetInfo(islrow[i], &st, NULL)); 1107 PetscCall(ISCreateStride(comm, n, st, 1, &lf->rf[i])); 1108 } 1109 for (i = 0; i < nc; i++) { 1110 PetscInt n, st; 1111 1112 PetscCall(ISGetLocalSize(islcol[i], &n)); 1113 PetscCall(ISStrideGetInfo(islcol[i], &st, NULL)); 1114 PetscCall(ISCreateStride(comm, n, st, 1, &lf->cf[i])); 1115 } 1116 lf->nr = nr; 1117 lf->nc = nc; 1118 PetscCall(PetscObjectContainerCompose((PetscObject)*newmat, "_convert_nest_lfields", lf, MatISContainerDestroyFields_Private)); 1119 } 1120 1121 /* Free workspace */ 1122 for (i = 0; i < nr; i++) PetscCall(ISDestroy(&islrow[i])); 1123 for (i = 0; i < nc; i++) PetscCall(ISDestroy(&islcol[i])); 1124 PetscCall(PetscFree6(isrow, iscol, islrow, islcol, snest, istrans)); 1125 PetscCall(PetscFree2(lr, lc)); 1126 PetscFunctionReturn(PETSC_SUCCESS); 1127 } 1128 1129 static PetscErrorCode MatDiagonalScale_IS(Mat A, Vec l, Vec r) 1130 { 1131 Mat_IS *matis = (Mat_IS *)A->data; 1132 Vec ll, rr; 1133 const PetscScalar *Y, *X; 1134 PetscScalar *x, *y; 1135 1136 PetscFunctionBegin; 1137 if (l) { 1138 ll = matis->y; 1139 PetscCall(VecGetArrayRead(l, &Y)); 1140 PetscCall(VecGetArray(ll, &y)); 1141 PetscCall(PetscSFBcastBegin(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE)); 1142 } else { 1143 ll = NULL; 1144 } 1145 if (r) { 1146 rr = matis->x; 1147 PetscCall(VecGetArrayRead(r, &X)); 1148 PetscCall(VecGetArray(rr, &x)); 1149 PetscCall(PetscSFBcastBegin(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE)); 1150 } else { 1151 rr = NULL; 1152 } 1153 if (ll) { 1154 PetscCall(PetscSFBcastEnd(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE)); 1155 PetscCall(VecRestoreArrayRead(l, &Y)); 1156 PetscCall(VecRestoreArray(ll, &y)); 1157 } 1158 if (rr) { 1159 PetscCall(PetscSFBcastEnd(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE)); 1160 PetscCall(VecRestoreArrayRead(r, &X)); 1161 PetscCall(VecRestoreArray(rr, &x)); 1162 
} 1163 PetscCall(MatDiagonalScale(matis->A, ll, rr)); 1164 PetscFunctionReturn(PETSC_SUCCESS); 1165 } 1166 1167 static PetscErrorCode MatGetInfo_IS(Mat A, MatInfoType flag, MatInfo *ginfo) 1168 { 1169 Mat_IS *matis = (Mat_IS *)A->data; 1170 MatInfo info; 1171 PetscLogDouble isend[6], irecv[6]; 1172 PetscInt bs; 1173 1174 PetscFunctionBegin; 1175 PetscCall(MatGetBlockSize(A, &bs)); 1176 if (matis->A->ops->getinfo) { 1177 PetscCall(MatGetInfo(matis->A, MAT_LOCAL, &info)); 1178 isend[0] = info.nz_used; 1179 isend[1] = info.nz_allocated; 1180 isend[2] = info.nz_unneeded; 1181 isend[3] = info.memory; 1182 isend[4] = info.mallocs; 1183 } else { 1184 isend[0] = 0.; 1185 isend[1] = 0.; 1186 isend[2] = 0.; 1187 isend[3] = 0.; 1188 isend[4] = 0.; 1189 } 1190 isend[5] = matis->A->num_ass; 1191 if (flag == MAT_LOCAL) { 1192 ginfo->nz_used = isend[0]; 1193 ginfo->nz_allocated = isend[1]; 1194 ginfo->nz_unneeded = isend[2]; 1195 ginfo->memory = isend[3]; 1196 ginfo->mallocs = isend[4]; 1197 ginfo->assemblies = isend[5]; 1198 } else if (flag == MAT_GLOBAL_MAX) { 1199 PetscCallMPI(MPIU_Allreduce(isend, irecv, 6, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)A))); 1200 1201 ginfo->nz_used = irecv[0]; 1202 ginfo->nz_allocated = irecv[1]; 1203 ginfo->nz_unneeded = irecv[2]; 1204 ginfo->memory = irecv[3]; 1205 ginfo->mallocs = irecv[4]; 1206 ginfo->assemblies = irecv[5]; 1207 } else if (flag == MAT_GLOBAL_SUM) { 1208 PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)A))); 1209 1210 ginfo->nz_used = irecv[0]; 1211 ginfo->nz_allocated = irecv[1]; 1212 ginfo->nz_unneeded = irecv[2]; 1213 ginfo->memory = irecv[3]; 1214 ginfo->mallocs = irecv[4]; 1215 ginfo->assemblies = A->num_ass; 1216 } 1217 ginfo->block_size = bs; 1218 ginfo->fill_ratio_given = 0; 1219 ginfo->fill_ratio_needed = 0; 1220 ginfo->factor_mallocs = 0; 1221 PetscFunctionReturn(PETSC_SUCCESS); 1222 } 1223 1224 static PetscErrorCode MatTranspose_IS(Mat A, 
MatReuse reuse, Mat *B) 1225 { 1226 Mat C, lC, lA; 1227 1228 PetscFunctionBegin; 1229 if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B)); 1230 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) { 1231 ISLocalToGlobalMapping rl2g, cl2g; 1232 PetscBool allow_repeated; 1233 1234 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 1235 PetscCall(MatSetSizes(C, A->cmap->n, A->rmap->n, A->cmap->N, A->rmap->N)); 1236 PetscCall(MatSetBlockSizes(C, A->cmap->bs, A->rmap->bs)); 1237 PetscCall(MatSetType(C, MATIS)); 1238 PetscCall(MatISGetAllowRepeated(A, &allow_repeated)); 1239 PetscCall(MatISSetAllowRepeated(C, allow_repeated)); 1240 PetscCall(MatGetLocalToGlobalMapping(A, &rl2g, &cl2g)); 1241 PetscCall(MatSetLocalToGlobalMapping(C, cl2g, rl2g)); 1242 } else C = *B; 1243 1244 /* perform local transposition */ 1245 PetscCall(MatISGetLocalMat(A, &lA)); 1246 PetscCall(MatTranspose(lA, MAT_INITIAL_MATRIX, &lC)); 1247 PetscCall(MatSetLocalToGlobalMapping(lC, lA->cmap->mapping, lA->rmap->mapping)); 1248 PetscCall(MatISSetLocalMat(C, lC)); 1249 PetscCall(MatDestroy(&lC)); 1250 1251 if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) { 1252 *B = C; 1253 } else { 1254 PetscCall(MatHeaderMerge(A, &C)); 1255 } 1256 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 1257 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 1258 PetscFunctionReturn(PETSC_SUCCESS); 1259 } 1260 1261 static PetscErrorCode MatDiagonalSet_IS(Mat A, Vec D, InsertMode insmode) 1262 { 1263 Mat_IS *is = (Mat_IS *)A->data; 1264 1265 PetscFunctionBegin; 1266 PetscCheck(!is->allow_repeated || insmode == ADD_VALUES, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "INSERT_VALUES with repeated entries not supported"); 1267 if (D) { /* MatShift_IS pass D = NULL */ 1268 PetscCall(VecScatterBegin(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD)); 1269 PetscCall(VecScatterEnd(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD)); 1270 } 1271 
PetscCall(VecPointwiseDivide(is->y, is->y, is->counter)); 1272 PetscCall(MatDiagonalSet(is->A, is->y, insmode)); 1273 PetscFunctionReturn(PETSC_SUCCESS); 1274 } 1275 1276 static PetscErrorCode MatShift_IS(Mat A, PetscScalar a) 1277 { 1278 Mat_IS *is = (Mat_IS *)A->data; 1279 1280 PetscFunctionBegin; 1281 PetscCall(VecSet(is->y, a)); 1282 PetscCall(MatDiagonalSet_IS(A, NULL, ADD_VALUES)); 1283 PetscFunctionReturn(PETSC_SUCCESS); 1284 } 1285 1286 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv) 1287 { 1288 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION]; 1289 1290 PetscFunctionBegin; 1291 PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n); 1292 PetscCall(ISLocalToGlobalMappingApply(A->rmap->mapping, m, rows, rows_l)); 1293 PetscCall(ISLocalToGlobalMappingApply(A->cmap->mapping, n, cols, cols_l)); 1294 PetscCall(MatSetValuesLocal_IS(A, m, rows_l, n, cols_l, values, addv)); 1295 PetscFunctionReturn(PETSC_SUCCESS); 1296 } 1297 1298 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv) 1299 { 1300 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION]; 1301 1302 PetscFunctionBegin; 1303 PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column block indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n); 1304 PetscCall(ISLocalToGlobalMappingApplyBlock(A->rmap->mapping, m, rows, rows_l)); 1305 PetscCall(ISLocalToGlobalMappingApplyBlock(A->cmap->mapping, n, cols, 
cols_l)); 1306 PetscCall(MatSetValuesBlockedLocal_IS(A, m, rows_l, n, cols_l, values, addv)); 1307 PetscFunctionReturn(PETSC_SUCCESS); 1308 } 1309 1310 static PetscErrorCode MatCreateSubMatrix_IS(Mat mat, IS irow, IS icol, MatReuse scall, Mat *newmat) 1311 { 1312 Mat locmat, newlocmat; 1313 Mat_IS *newmatis; 1314 const PetscInt *idxs; 1315 PetscInt i, m, n; 1316 1317 PetscFunctionBegin; 1318 if (scall == MAT_REUSE_MATRIX) { 1319 PetscBool ismatis; 1320 1321 PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis)); 1322 PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Not of MATIS type"); 1323 newmatis = (Mat_IS *)(*newmat)->data; 1324 PetscCheck(newmatis->getsub_ris, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local row IS"); 1325 PetscCheck(newmatis->getsub_cis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local col IS"); 1326 } 1327 /* irow and icol may not have duplicate entries */ 1328 if (PetscDefined(USE_DEBUG)) { 1329 Vec rtest, ltest; 1330 const PetscScalar *array; 1331 1332 PetscCall(MatCreateVecs(mat, <est, &rtest)); 1333 PetscCall(ISGetLocalSize(irow, &n)); 1334 PetscCall(ISGetIndices(irow, &idxs)); 1335 for (i = 0; i < n; i++) PetscCall(VecSetValue(rtest, idxs[i], 1.0, ADD_VALUES)); 1336 PetscCall(VecAssemblyBegin(rtest)); 1337 PetscCall(VecAssemblyEnd(rtest)); 1338 PetscCall(VecGetLocalSize(rtest, &n)); 1339 PetscCall(VecGetOwnershipRange(rtest, &m, NULL)); 1340 PetscCall(VecGetArrayRead(rtest, &array)); 1341 for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! 
Irow may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i])); 1342 PetscCall(VecRestoreArrayRead(rtest, &array)); 1343 PetscCall(ISRestoreIndices(irow, &idxs)); 1344 PetscCall(ISGetLocalSize(icol, &n)); 1345 PetscCall(ISGetIndices(icol, &idxs)); 1346 for (i = 0; i < n; i++) PetscCall(VecSetValue(ltest, idxs[i], 1.0, ADD_VALUES)); 1347 PetscCall(VecAssemblyBegin(ltest)); 1348 PetscCall(VecAssemblyEnd(ltest)); 1349 PetscCall(VecGetLocalSize(ltest, &n)); 1350 PetscCall(VecGetOwnershipRange(ltest, &m, NULL)); 1351 PetscCall(VecGetArrayRead(ltest, &array)); 1352 for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! Icol may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i])); 1353 PetscCall(VecRestoreArrayRead(ltest, &array)); 1354 PetscCall(ISRestoreIndices(icol, &idxs)); 1355 PetscCall(VecDestroy(&rtest)); 1356 PetscCall(VecDestroy(<est)); 1357 } 1358 if (scall == MAT_INITIAL_MATRIX) { 1359 Mat_IS *matis = (Mat_IS *)mat->data; 1360 ISLocalToGlobalMapping rl2g; 1361 IS is; 1362 PetscInt *lidxs, *lgidxs, *newgidxs; 1363 PetscInt ll, newloc, irbs, icbs, arbs, acbs, rbs, cbs; 1364 PetscBool cong; 1365 MPI_Comm comm; 1366 1367 PetscCall(PetscObjectGetComm((PetscObject)mat, &comm)); 1368 PetscCall(MatGetBlockSizes(mat, &arbs, &acbs)); 1369 PetscCall(ISGetBlockSize(irow, &irbs)); 1370 PetscCall(ISGetBlockSize(icol, &icbs)); 1371 rbs = arbs == irbs ? irbs : 1; 1372 cbs = acbs == icbs ? 
icbs : 1; 1373 PetscCall(ISGetLocalSize(irow, &m)); 1374 PetscCall(ISGetLocalSize(icol, &n)); 1375 PetscCall(MatCreate(comm, newmat)); 1376 PetscCall(MatSetType(*newmat, MATIS)); 1377 PetscCall(MatISSetAllowRepeated(*newmat, matis->allow_repeated)); 1378 PetscCall(MatSetSizes(*newmat, m, n, PETSC_DECIDE, PETSC_DECIDE)); 1379 PetscCall(MatSetBlockSizes(*newmat, rbs, cbs)); 1380 /* communicate irow to their owners in the layout */ 1381 PetscCall(ISGetIndices(irow, &idxs)); 1382 PetscCall(PetscLayoutMapLocal(mat->rmap, m, idxs, &ll, &lidxs, &lgidxs)); 1383 PetscCall(ISRestoreIndices(irow, &idxs)); 1384 PetscCall(PetscArrayzero(matis->sf_rootdata, matis->sf->nroots)); 1385 for (i = 0; i < ll; i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i] + 1; 1386 PetscCall(PetscFree(lidxs)); 1387 PetscCall(PetscFree(lgidxs)); 1388 PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE)); 1389 PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE)); 1390 for (i = 0, newloc = 0; i < matis->sf->nleaves; i++) 1391 if (matis->sf_leafdata[i]) newloc++; 1392 PetscCall(PetscMalloc1(newloc, &newgidxs)); 1393 PetscCall(PetscMalloc1(newloc, &lidxs)); 1394 for (i = 0, newloc = 0; i < matis->sf->nleaves; i++) 1395 if (matis->sf_leafdata[i]) { 1396 lidxs[newloc] = i; 1397 newgidxs[newloc++] = matis->sf_leafdata[i] - 1; 1398 } 1399 PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is)); 1400 PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g)); 1401 PetscCall(ISLocalToGlobalMappingSetBlockSize(rl2g, rbs)); 1402 PetscCall(ISDestroy(&is)); 1403 /* local is to extract local submatrix */ 1404 newmatis = (Mat_IS *)(*newmat)->data; 1405 PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_ris)); 1406 PetscCall(MatHasCongruentLayouts(mat, &cong)); 1407 if (cong && irow == icol && matis->csf == matis->sf) { 1408 PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, 
rl2g)); 1409 PetscCall(PetscObjectReference((PetscObject)newmatis->getsub_ris)); 1410 newmatis->getsub_cis = newmatis->getsub_ris; 1411 } else { 1412 ISLocalToGlobalMapping cl2g; 1413 1414 /* communicate icol to their owners in the layout */ 1415 PetscCall(ISGetIndices(icol, &idxs)); 1416 PetscCall(PetscLayoutMapLocal(mat->cmap, n, idxs, &ll, &lidxs, &lgidxs)); 1417 PetscCall(ISRestoreIndices(icol, &idxs)); 1418 PetscCall(PetscArrayzero(matis->csf_rootdata, matis->csf->nroots)); 1419 for (i = 0; i < ll; i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i] + 1; 1420 PetscCall(PetscFree(lidxs)); 1421 PetscCall(PetscFree(lgidxs)); 1422 PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE)); 1423 PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE)); 1424 for (i = 0, newloc = 0; i < matis->csf->nleaves; i++) 1425 if (matis->csf_leafdata[i]) newloc++; 1426 PetscCall(PetscMalloc1(newloc, &newgidxs)); 1427 PetscCall(PetscMalloc1(newloc, &lidxs)); 1428 for (i = 0, newloc = 0; i < matis->csf->nleaves; i++) 1429 if (matis->csf_leafdata[i]) { 1430 lidxs[newloc] = i; 1431 newgidxs[newloc++] = matis->csf_leafdata[i] - 1; 1432 } 1433 PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is)); 1434 PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g)); 1435 PetscCall(ISLocalToGlobalMappingSetBlockSize(cl2g, cbs)); 1436 PetscCall(ISDestroy(&is)); 1437 /* local is to extract local submatrix */ 1438 PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_cis)); 1439 PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, cl2g)); 1440 PetscCall(ISLocalToGlobalMappingDestroy(&cl2g)); 1441 } 1442 PetscCall(ISLocalToGlobalMappingDestroy(&rl2g)); 1443 } else { 1444 PetscCall(MatISGetLocalMat(*newmat, &newlocmat)); 1445 } 1446 PetscCall(MatISGetLocalMat(mat, &locmat)); 1447 newmatis = (Mat_IS *)(*newmat)->data; 1448 PetscCall(MatCreateSubMatrix(locmat, 
newmatis->getsub_ris, newmatis->getsub_cis, scall, &newlocmat)); 1449 if (scall == MAT_INITIAL_MATRIX) { 1450 PetscCall(MatISSetLocalMat(*newmat, newlocmat)); 1451 PetscCall(MatDestroy(&newlocmat)); 1452 } 1453 PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY)); 1454 PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY)); 1455 PetscFunctionReturn(PETSC_SUCCESS); 1456 } 1457 1458 static PetscErrorCode MatCopy_IS(Mat A, Mat B, MatStructure str) 1459 { 1460 Mat_IS *a = (Mat_IS *)A->data, *b; 1461 PetscBool ismatis; 1462 1463 PetscFunctionBegin; 1464 PetscCall(PetscObjectTypeCompare((PetscObject)B, MATIS, &ismatis)); 1465 PetscCheck(ismatis, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Need to be implemented"); 1466 b = (Mat_IS *)B->data; 1467 PetscCall(MatCopy(a->A, b->A, str)); 1468 PetscCall(PetscObjectStateIncrease((PetscObject)B)); 1469 PetscFunctionReturn(PETSC_SUCCESS); 1470 } 1471 1472 static PetscErrorCode MatMissingDiagonal_IS(Mat A, PetscBool *missing, PetscInt *d) 1473 { 1474 Vec v; 1475 const PetscScalar *array; 1476 PetscInt i, n; 1477 1478 PetscFunctionBegin; 1479 *missing = PETSC_FALSE; 1480 PetscCall(MatCreateVecs(A, NULL, &v)); 1481 PetscCall(MatGetDiagonal(A, v)); 1482 PetscCall(VecGetLocalSize(v, &n)); 1483 PetscCall(VecGetArrayRead(v, &array)); 1484 for (i = 0; i < n; i++) 1485 if (array[i] == 0.) 
break; 1486 PetscCall(VecRestoreArrayRead(v, &array)); 1487 PetscCall(VecDestroy(&v)); 1488 if (i != n) *missing = PETSC_TRUE; 1489 if (d) { 1490 *d = -1; 1491 if (*missing) { 1492 PetscInt rstart; 1493 PetscCall(MatGetOwnershipRange(A, &rstart, NULL)); 1494 *d = i + rstart; 1495 } 1496 } 1497 PetscFunctionReturn(PETSC_SUCCESS); 1498 } 1499 1500 static PetscErrorCode MatISSetUpSF_IS(Mat B) 1501 { 1502 Mat_IS *matis = (Mat_IS *)B->data; 1503 const PetscInt *gidxs; 1504 PetscInt nleaves; 1505 1506 PetscFunctionBegin; 1507 if (matis->sf) PetscFunctionReturn(PETSC_SUCCESS); 1508 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->sf)); 1509 PetscCall(ISLocalToGlobalMappingGetIndices(matis->rmapping, &gidxs)); 1510 PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nleaves)); 1511 PetscCall(PetscSFSetGraphLayout(matis->sf, B->rmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs)); 1512 PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->rmapping, &gidxs)); 1513 PetscCall(PetscMalloc2(matis->sf->nroots, &matis->sf_rootdata, matis->sf->nleaves, &matis->sf_leafdata)); 1514 if (matis->rmapping != matis->cmapping) { /* setup SF for columns */ 1515 PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nleaves)); 1516 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->csf)); 1517 PetscCall(ISLocalToGlobalMappingGetIndices(matis->cmapping, &gidxs)); 1518 PetscCall(PetscSFSetGraphLayout(matis->csf, B->cmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs)); 1519 PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->cmapping, &gidxs)); 1520 PetscCall(PetscMalloc2(matis->csf->nroots, &matis->csf_rootdata, matis->csf->nleaves, &matis->csf_leafdata)); 1521 } else { 1522 matis->csf = matis->sf; 1523 matis->csf_leafdata = matis->sf_leafdata; 1524 matis->csf_rootdata = matis->sf_rootdata; 1525 } 1526 PetscFunctionReturn(PETSC_SUCCESS); 1527 } 1528 1529 /*@ 1530 MatISGetAllowRepeated - Get the flag to allow repeated entries in the local to global map 1531 1532 
Not Collective 1533 1534 Input Parameter: 1535 . A - the matrix 1536 1537 Output Parameter: 1538 . flg - the boolean flag 1539 1540 Level: intermediate 1541 1542 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatSetLocalToGlobalMapping()`, `MatISSetAllowRepeated()` 1543 @*/ 1544 PetscErrorCode MatISGetAllowRepeated(Mat A, PetscBool *flg) 1545 { 1546 PetscBool ismatis; 1547 1548 PetscFunctionBegin; 1549 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1550 PetscAssertPointer(flg, 2); 1551 PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis)); 1552 PetscCheck(ismatis, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)A)->type_name); 1553 *flg = ((Mat_IS *)A->data)->allow_repeated; 1554 PetscFunctionReturn(PETSC_SUCCESS); 1555 } 1556 1557 /*@ 1558 MatISSetAllowRepeated - Set the flag to allow repeated entries in the local to global map 1559 1560 Logically Collective 1561 1562 Input Parameters: 1563 + A - the matrix 1564 - flg - the boolean flag 1565 1566 Level: intermediate 1567 1568 Notes: 1569 The default value is `PETSC_FALSE`. 1570 When called AFTER calling `MatSetLocalToGlobalMapping()` it will recreate the local matrices 1571 if `flg` is different from the previously set value. 1572 Specifically, when `flg` is true it will just recreate the local matrices, while if 1573 `flg` is false will assemble the local matrices summing up repeated entries. 

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatSetLocalToGlobalMapping()`, `MatISGetAllowRepeated()`
@*/
PetscErrorCode MatISSetAllowRepeated(Mat A, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscValidLogicalCollectiveBool(A, flg, 2);
  PetscTryMethod(A, "MatISSetAllowRepeated_C", (Mat, PetscBool), (A, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MATIS implementation of MatISSetAllowRepeated().
   If the local matrix already exists and the flag is being turned OFF, the previous
   (repeated-entry) local matrix is re-assembled into the new local space through a
   MatProduct (PtAP / AB / ABC depending on which local maps are present). */
static PetscErrorCode MatISSetAllowRepeated_IS(Mat A, PetscBool flg)
{
  Mat_IS                *matis = (Mat_IS *)A->data;
  Mat                    lA    = NULL;
  ISLocalToGlobalMapping lrmap, lcmap;

  PetscFunctionBegin;
  if (flg == matis->allow_repeated) PetscFunctionReturn(PETSC_SUCCESS); /* no change */
  if (!matis->A) { /* matrix has not been preallocated yet */
    matis->allow_repeated = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(!matis->islocalref, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not implemented for local references");
  if (matis->allow_repeated) { /* we will assemble the old local matrix if needed */
    lA = matis->A;
    PetscCall(PetscObjectReference((PetscObject)lA));
  }
  /* In case flg is True, we only recreate the local matrix */
  matis->allow_repeated = flg;
  PetscCall(MatSetLocalToGlobalMapping(A, A->rmap->mapping, A->cmap->mapping));
  if (lA) { /* assemble previous local matrix if needed */
    Mat nA = matis->A;

    PetscCall(MatGetLocalToGlobalMapping(nA, &lrmap, &lcmap));
    if (!lrmap && !lcmap) {
      PetscCall(MatISSetLocalMat(A, lA));
    } else {
      Mat            P = NULL, R = NULL;
      MatProductType ptype;

      /* choose the product that maps the old (repeated) local space to the new one */
      if (lrmap == lcmap) {
        ptype = MATPRODUCT_PtAP;
        PetscCall(MatCreateFromISLocalToGlobalMapping(lcmap, nA, PETSC_TRUE, PETSC_FALSE, NULL, &P));
        PetscCall(MatProductCreate(lA, P, NULL, &nA));
      } else {
        if (lcmap) PetscCall(MatCreateFromISLocalToGlobalMapping(lcmap, nA, PETSC_TRUE, PETSC_FALSE, NULL, &P));
        if (lrmap) PetscCall(MatCreateFromISLocalToGlobalMapping(lrmap, nA, PETSC_FALSE, PETSC_TRUE, NULL, &R));
        if (R && P) {
          ptype = MATPRODUCT_ABC;
          PetscCall(MatProductCreate(R, lA, P, &nA));
        } else if (R) {
          ptype = MATPRODUCT_AB;
          PetscCall(MatProductCreate(R, lA, NULL, &nA));
        } else {
          ptype = MATPRODUCT_AB;
          PetscCall(MatProductCreate(lA, P, NULL, &nA));
        }
      }
      PetscCall(MatProductSetType(nA, ptype));
      PetscCall(MatProductSetFromOptions(nA));
      PetscCall(MatProductSymbolic(nA));
      PetscCall(MatProductNumeric(nA));
      PetscCall(MatProductClear(nA));
      PetscCall(MatConvert(nA, matis->lmattype, MAT_INPLACE_MATRIX, &nA));
      PetscCall(MatISSetLocalMat(A, nA));
      PetscCall(MatDestroy(&nA));
      PetscCall(MatDestroy(&P));
      PetscCall(MatDestroy(&R));
    }
  }
  PetscCall(MatDestroy(&lA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatISStoreL2L - Store local-to-local operators during the Galerkin process of computing `MatPtAP()`

  Logically Collective

  Input Parameters:
+ A - the matrix
- store - the boolean flag

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatPtAP()`
@*/
PetscErrorCode MatISStoreL2L(Mat A, PetscBool store)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscValidLogicalCollectiveBool(A, store, 2);
  PetscTryMethod(A, "MatISStoreL2L_C", (Mat, PetscBool), (A, store));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MATIS implementation of MatISStoreL2L(); when turned off, drops any stored operator */
static PetscErrorCode MatISStoreL2L_IS(Mat A, PetscBool store)
{
  Mat_IS *matis = (Mat_IS *)A->data;

  PetscFunctionBegin;
  matis->storel2l = store;
  if (!store) PetscCall(PetscObjectCompose((PetscObject)A, "_MatIS_PtAP_l2l", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatISFixLocalEmpty - Compress out zero local rows from the local matrices

  Logically Collective

  Input Parameters:
+ A - the matrix
- fix - the boolean flag

  Level: advanced

  Note:
  When `fix` is `PETSC_TRUE`, new local matrices and l2g maps are generated during the final assembly process.

.seealso: [](ch_matrices), `Mat`, `MATIS`, `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatAssemblyEnd()`, `MAT_FINAL_ASSEMBLY`
@*/
PetscErrorCode MatISFixLocalEmpty(Mat A, PetscBool fix)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscValidLogicalCollectiveBool(A, fix, 2);
  PetscTryMethod(A, "MatISFixLocalEmpty_C", (Mat, PetscBool), (A, fix));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MATIS implementation of MatISFixLocalEmpty(); just records the flag, acted upon at assembly time */
static PetscErrorCode MatISFixLocalEmpty_IS(Mat A, PetscBool fix)
{
  Mat_IS *matis = (Mat_IS *)A->data;

  PetscFunctionBegin;
  matis->locempty = fix;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatISSetPreallocation - Preallocates memory for a `MATIS` parallel matrix.

  Collective

  Input Parameters:
+ B - the matrix
. d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
         (same value is used for all local rows)
. d_nnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or `NULL`, if `d_nz` is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e `m`.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.
o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
         submatrix (same value is used for all local rows).
- o_nnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or `NULL`, if `o_nz` is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e `m`.

  If the *_nnz parameter is given then the *_nz parameter is ignored

  Level: intermediate

  Note:
  This function has the same interface as the `MATMPIAIJ` preallocation routine in order to simplify the transition
  from the assembled format to the unassembled one. It overestimates the preallocation of `MATIS` local
  matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatMPIAIJSetPreallocation()`, `MatISGetLocalMat()`, `MATIS`
@*/
PetscErrorCode MatISSetPreallocation(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  PetscTryMethod(B, "MatISSetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, d_nz, d_nnz, o_nz, o_nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MATIS implementation of MatISSetPreallocation().
   Sums the diagonal and off-diagonal counts on the global (root) side, broadcasts
   them to the local (leaf) side through the SF, and uses the result to preallocate
   the local matrix in all supported storage formats (AIJ/BAIJ/SBAIJ, and HYPRE if
   available); the counts are clamped to the number of local columns. */
static PetscErrorCode MatISSetPreallocation_IS(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
{
  Mat_IS  *matis = (Mat_IS *)B->data;
  PetscInt bs, i, nlocalcols;

  PetscFunctionBegin;
  PetscCall(MatSetUp(B));
  /* rootdata <- total (diag + offdiag) nonzeros per globally owned row */
  if (!d_nnz)
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nz;
  else
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nnz[i];

  if (!o_nnz)
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nz;
  else
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nnz[i];

  PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
  PetscCall(MatGetSize(matis->A, NULL, &nlocalcols));
  PetscCall(MatGetBlockSize(matis->A, &bs));
  PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));

  /* a local row cannot have more nonzeros than local columns */
  for (i = 0; i < matis->sf->nleaves; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols);
  PetscCall(MatSeqAIJSetPreallocation(matis->A, 0, matis->sf_leafdata));
#if defined(PETSC_HAVE_HYPRE)
  PetscCall(MatHYPRESetPreallocation(matis->A, 0, matis->sf_leafdata, 0, NULL));
#endif

  /* collapse point counts to block counts (max over the rows of each block) */
  for (i = 0; i < matis->sf->nleaves / bs; i++) {
    PetscInt b;

    matis->sf_leafdata[i] = matis->sf_leafdata[i * bs] / bs;
    for (b = 1; b < bs; b++) matis->sf_leafdata[i] = PetscMax(matis->sf_leafdata[i], matis->sf_leafdata[i * bs + b] / bs);
  }
  PetscCall(MatSeqBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));

  /* SBAIJ stores the upper triangle only */
  nlocalcols /= bs;
  for (i = 0; i < matis->sf->nleaves / bs; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols - i);
  PetscCall(MatSeqSBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));

  /* for other matrix types */
  PetscCall(MatSetUp(matis->A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Computes an (over)estimate of the preallocation for the assembled matrix B from the
   unassembled MATIS A, by summing (or max-reducing, if maxreduce) the per-process
   contributions on shared rows through the SF. */
PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS         *matis = (Mat_IS *)A->data;
  PetscInt       *my_dnz, *my_onz, *dnz, *onz, *mat_ranges, *row_ownership;
  const PetscInt *global_indices_r, *global_indices_c;
  PetscInt        i, j, bs, rows, cols;
  PetscInt        lrows, lcols;
  PetscInt        local_rows, local_cols;
  PetscMPIInt     size;
  PetscBool       isdense, issbaij;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &rows, &cols));
  PetscCall(MatGetBlockSize(A, &bs));
  PetscCall(MatGetSize(matis->A, &local_rows, &local_cols));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQDENSE, &isdense));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
  PetscCall(ISLocalToGlobalMappingGetIndices(matis->rmapping, &global_indices_r));
  if (matis->rmapping != matis->cmapping) PetscCall(ISLocalToGlobalMappingGetIndices(matis->cmapping, &global_indices_c));
  else global_indices_c = global_indices_r;

  if (issbaij) PetscCall(MatGetRowUpperTriangular(matis->A));
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  PetscCall(MatGetLocalSize(A, &lrows, &lcols));
  MatPreallocateBegin(PetscObjectComm((PetscObject)A), lrows, lcols, dnz, onz);
  /* All processes need to compute entire row ownership */
  PetscCall(PetscMalloc1(rows, &row_ownership));
  PetscCall(MatGetOwnershipRanges(A, (const PetscInt **)&mat_ranges));
  for (i = 0; i < size; i++) {
    for (j = mat_ranges[i]; j < mat_ranges[i + 1]; j++) row_ownership[j] = i;
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges (used for diag/offdiag tests) */
  PetscCall(MatGetOwnershipRangesColumn(A, (const PetscInt **)&mat_ranges));

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  PetscCall(PetscCalloc2(local_rows, &my_dnz, local_rows, &my_onz));
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i = 0; i < local_rows; i++) {
      PetscInt owner = row_ownership[global_indices_r[i]];
      for (j = 0; j < local_cols; j++) {
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
      }
    }
  } else if (matis->A->ops->getrowij) { /* fast path: walk the CSR structure directly */
    const PetscInt *ii, *jj, *jptr;
    PetscBool       done;
    PetscCall(MatGetRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &local_rows, &ii, &jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)matis->A), PETSC_ERR_PLIB, "Error in MatGetRowIJ");
    jptr = jj;
    for (i = 0; i < local_rows; i++) {
      PetscInt index_row = global_indices_r[i];
      for (j = 0; j < ii[i + 1] - ii[i]; j++, jptr++) {
        PetscInt owner     = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) { /* SBAIJ stores only the upper triangle; count the mirrored entry too */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner] - 1 && index_row < mat_ranges[owner + 1]) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &local_rows, &ii, &jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)matis->A), PETSC_ERR_PLIB, "Error in MatRestoreRowIJ");
  } else { /* loop over rows and use MatGetRow */
    for (i = 0; i < local_rows; i++) {
      const PetscInt *cols;
      PetscInt        ncols, index_row = global_indices_r[i];
      PetscCall(MatGetRow(matis->A, i, &ncols, &cols, NULL));
      for (j = 0; j < ncols; j++) {
        PetscInt owner     = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner] - 1 && index_row < mat_ranges[owner + 1]) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      PetscCall(MatRestoreRow(matis->A, i, &ncols, &cols, NULL));
    }
  }
  if (global_indices_c != global_indices_r) PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->cmapping, &global_indices_c));
  PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->rmapping, &global_indices_r));
  PetscCall(PetscFree(row_ownership));

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_dnz, dnz, MPI_MAX));
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_onz, onz, MPI_MAX));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_dnz, dnz, MPI_MAX));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_onz, onz, MPI_MAX));
  } else {
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_dnz, dnz, MPI_SUM));
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_onz, onz, MPI_SUM));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_dnz, dnz, MPI_SUM));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_onz, onz, MPI_SUM));
  }
  PetscCall(PetscFree2(my_dnz, my_onz));

  /* Resize preallocation if overestimated */
  for (i = 0; i < lrows; i++) {
    dnz[i] = PetscMin(dnz[i], lcols);
    onz[i] = PetscMin(onz[i], cols - lcols);
  }

  /* Set preallocation */
  PetscCall(MatSetBlockSizesFromMats(B, A, A));
  PetscCall(MatSeqAIJSetPreallocation(B, 0, dnz));
  PetscCall(MatMPIAIJSetPreallocation(B, 0, dnz, 0, onz));
  /* collapse point counts to block counts for the blocked formats (reuses dnz/onz in place) */
  for (i = 0; i < lrows; i += bs) {
    PetscInt b, d = dnz[i], o = onz[i];

    for (b = 1; b < bs; b++) {
      d = PetscMax(d, dnz[i + b]);
      o = PetscMax(o, onz[i + b]);
    }
    dnz[i / bs] = PetscMin(d / bs + d % bs, lcols / bs);
    onz[i / bs] = PetscMin(o / bs + o % bs, (cols - lcols) / bs);
  }
  PetscCall(MatSeqBAIJSetPreallocation(B, bs, 0, dnz));
  PetscCall(MatMPIBAIJSetPreallocation(B, bs, 0, dnz, 0, onz));
  PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, dnz, 0, onz));
  MatPreallocateEnd(dnz, onz);
  if (issbaij) PetscCall(MatRestoreRowUpperTriangular(matis->A));
  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Converts a MATIS matrix to an assembled XAIJ format. In the sequential one-to-one
   case the local matrix is converted directly and permuted; otherwise the local
   entries are summed into a freshly preallocated parallel matrix. */
PETSC_INTERN PetscErrorCode MatConvert_IS_XAIJ(Mat mat, MatType mtype, MatReuse reuse, Mat *M)
{
  Mat_IS            *matis = (Mat_IS *)mat->data;
  Mat                local_mat, MT;
  PetscInt           rbs, cbs, rows, cols, lrows, lcols;
  PetscInt           local_rows, local_cols;
  PetscBool          isseqdense, isseqsbaij, isseqaij, isseqbaij;
  PetscMPIInt        size;
  const PetscScalar *array;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  if (size == 1 && mat->rmap->N == matis->A->rmap->N && mat->cmap->N == matis->A->cmap->N && !matis->allow_repeated) {
    Mat      B;
    IS       irows = NULL, icols = NULL;
    PetscInt rbs, cbs;

    PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
    if (reuse != MAT_REUSE_MATRIX) { /* check if l2g maps are one-to-one */
      IS              rows, cols;
      const PetscInt *ridxs, *cidxs;
      PetscInt        i, nw;
      PetscBT         work;

      /* the row map is one-to-one iff its block indices hit every slot exactly once */
      PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->rmapping, &ridxs));
      PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nw));
      nw = nw / rbs;
      PetscCall(PetscBTCreate(nw, &work));
      for (i = 0; i < nw; i++) PetscCall(PetscBTSet(work, ridxs[i]));
      for (i = 0; i < nw; i++)
        if (!PetscBTLookup(work, i)) break;
      if (i == nw) { /* a permutation: store its inverse to reorder the converted matrix */
        PetscCall(ISCreateBlock(PETSC_COMM_SELF, rbs, nw, ridxs, PETSC_USE_POINTER, &rows));
        PetscCall(ISSetPermutation(rows));
        PetscCall(ISInvertPermutation(rows, PETSC_DECIDE, &irows));
        PetscCall(ISDestroy(&rows));
      }
      PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->rmapping, &ridxs));
      PetscCall(PetscBTDestroy(&work));
      if (irows && matis->rmapping != matis->cmapping) { /* repeat the check for the column map */
        PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->cmapping, &cidxs));
        PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nw));
        nw = nw / cbs;
        PetscCall(PetscBTCreate(nw, &work));
        for (i = 0; i < nw; i++) PetscCall(PetscBTSet(work, cidxs[i]));
        for (i = 0; i < nw; i++)
          if (!PetscBTLookup(work, i)) break;
        if (i == nw) {
          PetscCall(ISCreateBlock(PETSC_COMM_SELF, cbs, nw, cidxs, PETSC_USE_POINTER, &cols));
          PetscCall(ISSetPermutation(cols));
          PetscCall(ISInvertPermutation(cols, PETSC_DECIDE, &icols));
          PetscCall(ISDestroy(&cols));
        }
        PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->cmapping, &cidxs));
        PetscCall(PetscBTDestroy(&work));
      } else if (irows) { /* same map for rows and columns */
        PetscCall(PetscObjectReference((PetscObject)irows));
        icols = irows;
      }
    } else { /* reuse: recover the permutations stored on the previous conversion */
      PetscCall(PetscObjectQuery((PetscObject)*M, "_MatIS_IS_XAIJ_irows", (PetscObject *)&irows));
      PetscCall(PetscObjectQuery((PetscObject)*M, "_MatIS_IS_XAIJ_icols", (PetscObject *)&icols));
      if (irows) PetscCall(PetscObjectReference((PetscObject)irows));
      if (icols) PetscCall(PetscObjectReference((PetscObject)icols));
    }
    if (!irows || !icols) { /* maps are not permutations: fall back to the general path */
      PetscCall(ISDestroy(&icols));
      PetscCall(ISDestroy(&irows));
      goto general_assembly;
    }
    PetscCall(MatConvert(matis->A, mtype, MAT_INITIAL_MATRIX, &B));
    if (reuse != MAT_INPLACE_MATRIX) {
      PetscCall(MatCreateSubMatrix(B, irows, icols, reuse, M));
      PetscCall(PetscObjectCompose((PetscObject)*M, "_MatIS_IS_XAIJ_irows", (PetscObject)irows));
      PetscCall(PetscObjectCompose((PetscObject)*M, "_MatIS_IS_XAIJ_icols", (PetscObject)icols));
    } else {
      Mat C;

      PetscCall(MatCreateSubMatrix(B, irows, icols, MAT_INITIAL_MATRIX, &C));
      PetscCall(MatHeaderReplace(mat, &C));
    }
    PetscCall(MatDestroy(&B));
    PetscCall(ISDestroy(&icols));
    PetscCall(ISDestroy(&irows));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
general_assembly:
  PetscCall(MatGetSize(mat, &rows, &cols));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
  PetscCall(MatGetLocalSize(mat, &lrows, &lcols));
  PetscCall(MatGetSize(matis->A, &local_rows, &local_cols));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQDENSE, &isseqdense));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQAIJ, &isseqaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQBAIJ, &isseqbaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &isseqsbaij));
  PetscCheck(isseqdense || isseqaij || isseqbaij || isseqsbaij, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)matis->A)->type_name);
  if (PetscDefined(USE_DEBUG)) {
    PetscBool lb[4], bb[4];

    lb[0] = isseqdense;
    lb[1] = isseqaij;
    lb[2] = isseqbaij;
    lb[3] = isseqsbaij;
    PetscCallMPI(MPIU_Allreduce(lb, bb, 4, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
    PetscCheck(bb[0] || bb[1] || bb[2] || bb[3], PETSC_COMM_SELF, PETSC_ERR_SUP, "Local matrices must have the same type");
  }

  if (reuse != MAT_REUSE_MATRIX) {
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &MT));
    PetscCall(MatSetSizes(MT, lrows, lcols, rows, cols));
    PetscCall(MatSetType(MT, mtype));
    PetscCall(MatSetBlockSizes(MT, rbs, cbs));
    PetscCall(MatISSetMPIXAIJPreallocation_Private(mat, MT, PETSC_FALSE));
  } else {
    PetscInt mrbs, mcbs, mrows, mcols, mlrows, mlcols;

    /* some checks */
    MT = *M;
    PetscCall(MatGetBlockSizes(MT, &mrbs, &mcbs));
    PetscCall(MatGetSize(MT, &mrows, &mcols));
    PetscCall(MatGetLocalSize(MT, &mlrows, &mlcols));
    PetscCheck(mrows == rows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", rows, mrows);
    PetscCheck(mcols == cols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", cols, mcols);
    PetscCheck(mlrows == lrows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", lrows, mlrows);
    PetscCheck(mlcols == lcols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", lcols, mlcols);
    PetscCheck(mrbs == rbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong row block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", rbs, mrbs);
    PetscCheck(mcbs == cbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong col block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", cbs, mcbs);
    PetscCall(MatZeroEntries(MT));
  }

  if (isseqsbaij || isseqbaij) { /* insertion below works on AIJ structure; convert first */
    PetscCall(MatConvert(matis->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &local_mat));
    isseqaij = PETSC_TRUE;
  } else {
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    local_mat = matis->A;
  }

  /* Set values */
  PetscCall(MatSetLocalToGlobalMapping(MT, matis->rmapping, matis->cmapping));
  if (isseqdense) { /* special case for dense local matrices */
    PetscInt i, *dummy;

    PetscCall(PetscMalloc1(PetscMax(local_rows, local_cols), &dummy));
    for (i = 0; i < PetscMax(local_rows, local_cols); i++) dummy[i] = i;
    PetscCall(MatSetOption(MT, MAT_ROW_ORIENTED, PETSC_FALSE)); /* dense storage is column oriented */
    PetscCall(MatDenseGetArrayRead(local_mat, &array));
    PetscCall(MatSetValuesLocal(MT, local_rows, dummy, local_cols, dummy, array, ADD_VALUES));
    PetscCall(MatDenseRestoreArrayRead(local_mat, &array));
    PetscCall(PetscFree(dummy));
  } else if (isseqaij) {
    const PetscInt *blocks;
    PetscInt        i, nvtxs, *xadj, *adjncy, nb;
    PetscBool       done;
    PetscScalar    *sarray;

    PetscCall(MatGetRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatGetRowIJ");
    PetscCall(MatSeqAIJGetArray(local_mat, &sarray));
    PetscCall(MatGetVariableBlockSizes(local_mat, &nb, &blocks));
    if (nb) { /* speed up assembly for special blocked matrices (used by BDDC) */
      PetscInt sum;

      for (i = 0, sum = 0; i < nb; i++) sum += blocks[i];
      if (sum == nvtxs) {
        PetscInt r;

        for (i = 0, r = 0; i < nb; i++) {
          PetscAssert(blocks[i] == xadj[r + 1] - xadj[r], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid block sizes prescribed for block %" PetscInt_FMT ": expected %" PetscInt_FMT ", got %" PetscInt_FMT, i, blocks[i], xadj[r + 1] - xadj[r]);
          PetscCall(MatSetValuesLocal(MT, blocks[i], adjncy + xadj[r], blocks[i], adjncy + xadj[r], sarray + xadj[r], ADD_VALUES));
          r += blocks[i];
        }
      } else {
        for (i = 0; i < nvtxs; i++) PetscCall(MatSetValuesLocal(MT, 1, &i, xadj[i + 1] - xadj[i], adjncy + xadj[i], sarray + xadj[i], ADD_VALUES));
      }
    } else {
      for (i = 0; i < nvtxs; i++) PetscCall(MatSetValuesLocal(MT, 1, &i, xadj[i + 1] - xadj[i], PetscSafePointerPlusOffset(adjncy, xadj[i]), PetscSafePointerPlusOffset(sarray, xadj[i]), ADD_VALUES));
    }
    PetscCall(MatRestoreRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatRestoreRowIJ");
    PetscCall(MatSeqAIJRestoreArray(local_mat, &sarray));
  } else { /* very basic values insertion for all other matrix types */
    for (PetscInt i = 0; i < local_rows; i++) {
      PetscInt        j;
      const PetscInt *local_indices_cols;

      PetscCall(MatGetRow(local_mat, i, &j, &local_indices_cols, &array));
      PetscCall(MatSetValuesLocal(MT, 1, &i, j, local_indices_cols, array, ADD_VALUES));
      PetscCall(MatRestoreRow(local_mat, i, &j, &local_indices_cols, &array));
    }
  }
  PetscCall(MatDestroy(&local_mat));
  PetscCall(MatAssemblyBegin(MT, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(MT, MAT_FINAL_ASSEMBLY));
  if (isseqdense) PetscCall(MatSetOption(MT, MAT_ROW_ORIENTED, PETSC_TRUE));
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(mat, &MT));
  } else if (reuse == MAT_INITIAL_MATRIX) {
    *M = MT;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Duplicates a MATIS matrix: replicates sizes, block sizes, local matrix type,
   the allow_repeated flag, and the local-to-global maps, then duplicates the
   local matrix itself according to op. */
static PetscErrorCode MatDuplicate_IS(Mat mat, MatDuplicateOption op, Mat *newmat)
{
  Mat_IS  *matis = (Mat_IS *)mat->data;
  PetscInt rbs, cbs, m, n, M, N;
  Mat      B, localmat;

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &rbs));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &cbs));
  PetscCall(MatGetSize(mat, &M, &N));
  PetscCall(MatGetLocalSize(mat, &m, &n));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
  PetscCall(MatSetSizes(B, m, n, M, N));
  PetscCall(MatSetBlockSize(B, rbs == cbs ? rbs : 1)); /* a single block size only if rows and columns agree */
  PetscCall(MatSetType(B, MATIS));
  PetscCall(MatISSetLocalMatType(B, matis->lmattype));
  PetscCall(MatISSetAllowRepeated(B, matis->allow_repeated));
  PetscCall(MatSetLocalToGlobalMapping(B, mat->rmap->mapping, mat->cmap->mapping));
  PetscCall(MatDuplicate(matis->A, op, &localmat));
  PetscCall(MatSetLocalToGlobalMapping(localmat, matis->A->rmap->mapping, matis->A->cmap->mapping));
  PetscCall(MatISSetLocalMat(B, localmat));
  PetscCall(MatDestroy(&localmat));
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  *newmat = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* A MATIS matrix is Hermitian iff every local matrix is Hermitian (logical AND across the comm) */
static PetscErrorCode MatIsHermitian_IS(Mat A, PetscReal tol, PetscBool *flg)
{
  Mat_IS   *matis = (Mat_IS *)A->data;
  PetscBool local_sym;

  PetscFunctionBegin;
  PetscCall(MatIsHermitian(matis->A, tol, &local_sym));
  PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symmetry requires identical row/column maps; otherwise report false without communication */
static PetscErrorCode MatIsSymmetric_IS(Mat A, PetscReal tol, PetscBool *flg)
{
  Mat_IS   *matis = (Mat_IS *)A->data;
  PetscBool local_sym;

  PetscFunctionBegin;
  if (matis->rmapping != matis->cmapping) {
    *flg = PETSC_FALSE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(MatIsSymmetric(matis->A, tol, &local_sym));
  PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Structural symmetry: same map requirement and reduction as MatIsSymmetric_IS */
static PetscErrorCode MatIsStructurallySymmetric_IS(Mat A, PetscBool *flg)
{
  Mat_IS   *matis = (Mat_IS *)A->data;
  PetscBool local_sym;

  PetscFunctionBegin;
  if (matis->rmapping != matis->cmapping) {
    *flg = PETSC_FALSE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(MatIsStructurallySymmetric(matis->A, &local_sym));
  PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Releases all MATIS resources and unregisters the composed methods */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  Mat_IS *b = (Mat_IS *)A->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(b->bdiag));
  PetscCall(PetscFree(b->lmattype));
  PetscCall(MatDestroy(&b->A));
  PetscCall(VecScatterDestroy(&b->cctx));
  PetscCall(VecScatterDestroy(&b->rctx));
  PetscCall(VecDestroy(&b->x));
  PetscCall(VecDestroy(&b->y));
  PetscCall(VecDestroy(&b->counter));
  PetscCall(ISDestroy(&b->getsub_ris));
  PetscCall(ISDestroy(&b->getsub_cis));
  if (b->sf != b->csf) { /* column SF owns its own work arrays only when distinct */
    PetscCall(PetscSFDestroy(&b->csf));
    PetscCall(PetscFree2(b->csf_rootdata, b->csf_leafdata));
  } else b->csf = NULL;
  PetscCall(PetscSFDestroy(&b->sf));
  PetscCall(PetscFree2(b->sf_rootdata, b->sf_leafdata));
  PetscCall(ISLocalToGlobalMappingDestroy(&b->rmapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&b->cmapping));
  PetscCall(MatDestroy(&b->dA));
  PetscCall(MatDestroy(&b->assembledA));
  PetscCall(PetscFree(A->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)A, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A,
"MatISRestoreLocalMat_C", NULL)); 2269 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", NULL)); 2270 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", NULL)); 2271 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", NULL)); 2272 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", NULL)); 2273 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", NULL)); 2274 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", NULL)); 2275 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", NULL)); 2276 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", NULL)); 2277 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", NULL)); 2278 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", NULL)); 2279 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", NULL)); 2280 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", NULL)); 2281 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL)); 2282 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL)); 2283 PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetAllowRepeated_C", NULL)); 2284 PetscFunctionReturn(PETSC_SUCCESS); 2285 } 2286 2287 static PetscErrorCode MatMult_IS(Mat A, Vec x, Vec y) 2288 { 2289 Mat_IS *is = (Mat_IS *)A->data; 2290 PetscScalar zero = 0.0; 2291 2292 PetscFunctionBegin; 2293 /* scatter the global vector x into the local work vector */ 2294 PetscCall(VecScatterBegin(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD)); 2295 PetscCall(VecScatterEnd(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD)); 2296 2297 /* multiply the local matrix */ 2298 PetscCall(MatMult(is->A, is->x, is->y)); 2299 2300 /* scatter product back into 
global memory */ 2301 PetscCall(VecSet(y, zero)); 2302 PetscCall(VecScatterBegin(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE)); 2303 PetscCall(VecScatterEnd(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE)); 2304 PetscFunctionReturn(PETSC_SUCCESS); 2305 } 2306 2307 static PetscErrorCode MatMultAdd_IS(Mat A, Vec v1, Vec v2, Vec v3) 2308 { 2309 Vec temp_vec; 2310 2311 PetscFunctionBegin; /* v3 = v2 + A * v1.*/ 2312 if (v3 != v2) { 2313 PetscCall(MatMult(A, v1, v3)); 2314 PetscCall(VecAXPY(v3, 1.0, v2)); 2315 } else { 2316 PetscCall(VecDuplicate(v2, &temp_vec)); 2317 PetscCall(MatMult(A, v1, temp_vec)); 2318 PetscCall(VecAXPY(temp_vec, 1.0, v2)); 2319 PetscCall(VecCopy(temp_vec, v3)); 2320 PetscCall(VecDestroy(&temp_vec)); 2321 } 2322 PetscFunctionReturn(PETSC_SUCCESS); 2323 } 2324 2325 static PetscErrorCode MatMultTranspose_IS(Mat A, Vec y, Vec x) 2326 { 2327 Mat_IS *is = (Mat_IS *)A->data; 2328 2329 PetscFunctionBegin; 2330 /* scatter the global vector x into the local work vector */ 2331 PetscCall(VecScatterBegin(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD)); 2332 PetscCall(VecScatterEnd(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD)); 2333 2334 /* multiply the local matrix */ 2335 PetscCall(MatMultTranspose(is->A, is->y, is->x)); 2336 2337 /* scatter product back into global vector */ 2338 PetscCall(VecSet(x, 0)); 2339 PetscCall(VecScatterBegin(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE)); 2340 PetscCall(VecScatterEnd(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE)); 2341 PetscFunctionReturn(PETSC_SUCCESS); 2342 } 2343 2344 static PetscErrorCode MatMultTransposeAdd_IS(Mat A, Vec v1, Vec v2, Vec v3) 2345 { 2346 Vec temp_vec; 2347 2348 PetscFunctionBegin; /* v3 = v2 + A' * v1.*/ 2349 if (v3 != v2) { 2350 PetscCall(MatMultTranspose(A, v1, v3)); 2351 PetscCall(VecAXPY(v3, 1.0, v2)); 2352 } else { 2353 PetscCall(VecDuplicate(v2, &temp_vec)); 2354 PetscCall(MatMultTranspose(A, v1, temp_vec)); 2355 PetscCall(VecAXPY(temp_vec, 1.0, v2)); 2356 
    PetscCall(VecCopy(temp_vec, v3));
    PetscCall(VecDestroy(&temp_vec));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Viewer for MATIS. ASCII: dumps the local matrix (and optionally the l2g maps).
   Binary: writes a custom header (sizes, -commsize, map equality, allow_repeated),
   the local matrix type name, the l2g maps, the local sizes, and finally the
   unassembled local matrices wrapped in a temporary MPIAIJ.
   NOTE: the binary write order here must stay in sync with MatLoad_IS. */
static PetscErrorCode MatView_IS(Mat A, PetscViewer viewer)
{
  Mat_IS                *a = (Mat_IS *)A->data;
  PetscViewer            sviewer;
  PetscBool              isascii, isbinary, viewl2g = PETSC_FALSE, native;
  PetscViewerFormat      format;
  ISLocalToGlobalMapping rmap, cmap;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  native = (PetscBool)(format == PETSC_VIEWER_NATIVE);
  /* native format uses the user-supplied maps attached to the layouts,
     otherwise the (possibly filtered) internal maps */
  if (native) {
    rmap = A->rmap->mapping;
    cmap = A->cmap->mapping;
  } else {
    rmap = a->rmapping;
    cmap = a->cmapping;
  }
  if (isascii) {
    if (format == PETSC_VIEWER_ASCII_INFO) PetscFunctionReturn(PETSC_SUCCESS);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL || format == PETSC_VIEWER_ASCII_MATLAB) viewl2g = PETSC_TRUE;
  } else if (isbinary) {
    PetscInt    tr[6], nr, nc;
    char        lmattype[64] = {'\0'};
    PetscMPIInt size;
    PetscBool   skipHeader;
    IS          is;

    PetscCall(PetscViewerSetUp(viewer));
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
    tr[0] = MAT_FILE_CLASSID;
    tr[1] = A->rmap->N;
    tr[2] = A->cmap->N;
    tr[3] = -size; /* AIJ stores nnz here; the negative sign marks a MATIS file */
    tr[4] = (PetscInt)(rmap == cmap);
    tr[5] = a->allow_repeated;
    PetscCall(PetscSNPrintf(lmattype, sizeof(lmattype), "%s", a->lmattype));

    PetscCall(PetscViewerBinaryWrite(viewer, tr, PETSC_STATIC_ARRAY_LENGTH(tr), PETSC_INT));
    PetscCall(PetscViewerBinaryWrite(viewer, lmattype, sizeof(lmattype), PETSC_CHAR));

    /* first dump l2g info (we need the header for proper loading on different number of processes) */
    PetscCall(PetscViewerBinaryGetSkipHeader(viewer, &skipHeader));
    PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_FALSE));
    PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
    if (cmap != rmap) PetscCall(ISLocalToGlobalMappingView(cmap, viewer));

    /* then the sizes of the local matrices */
    PetscCall(MatGetSize(a->A, &nr, &nc));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), 1, &nr, PETSC_USE_POINTER, &is));
    PetscCall(ISView(is, viewer));
    PetscCall(ISDestroy(&is));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), 1, &nc, PETSC_USE_POINTER, &is));
    PetscCall(ISView(is, viewer));
    PetscCall(ISDestroy(&is));
    PetscCall(PetscViewerBinarySetSkipHeader(viewer, skipHeader));
  }
  if (format == PETSC_VIEWER_ASCII_MATLAB) {
    char        name[64];
    PetscMPIInt size, rank;

    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)viewer), &rank));
    if (size > 1) PetscCall(PetscSNPrintf(name, sizeof(name), "lmat_%d", rank));
    else PetscCall(PetscSNPrintf(name, sizeof(name), "lmat"));
    PetscCall(PetscObjectSetName((PetscObject)a->A, name));
  }

  /* Dump the local matrices */
  if (isbinary) { /* ViewerGetSubViewer does not work in parallel */
    PetscBool   isaij;
    PetscInt    nr, nc;
    Mat         lA, B;
    Mat_MPIAIJ *b;

    /* We create a temporary MPIAIJ matrix that stores the unassembled operator */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)a->A, MATAIJ, &isaij));
    if (!isaij) PetscCall(MatConvert(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &lA));
    else {
      PetscCall(PetscObjectReference((PetscObject)a->A));
      lA = a->A;
    }
    PetscCall(MatCreate(PetscObjectComm((PetscObject)viewer), &B));
    PetscCall(MatSetType(B, MATMPIAIJ));
    PetscCall(MatGetSize(lA, &nr, &nc));
    PetscCall(MatSetSizes(B, nr, nc, PETSC_DECIDE, PETSC_DECIDE));
    PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));

    /* steal the diagonal block of the temporary MPIAIJ and replace it with the
       local matrix, so that MatView on B dumps the unassembled operator */
    b = (Mat_MPIAIJ *)B->data;
    PetscCall(MatDestroy(&b->A));
    b->A = lA;

    PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatView(B, viewer));
    PetscCall(MatDestroy(&B));
  } else {
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    PetscCall(MatView(a->A, sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  }

  /* with ASCII, we dump the l2gmaps at the end */
  if (viewl2g) {
    if (format == PETSC_VIEWER_ASCII_MATLAB) {
      PetscCall(PetscObjectSetName((PetscObject)rmap, "row"));
      PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
      PetscCall(PetscObjectSetName((PetscObject)cmap, "col"));
      PetscCall(ISLocalToGlobalMappingView(cmap, viewer));
    } else {
      PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
      PetscCall(ISLocalToGlobalMappingView(cmap, viewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Loader for MATIS binary files; reads back, in order, what MatView_IS wrote:
   header, local matrix type, l2g maps, local sizes, unassembled operator.
   Works on a different number of processes than the one used to save. */
static PetscErrorCode MatLoad_IS(Mat A, PetscViewer viewer)
{
  ISLocalToGlobalMapping rmap, cmap;
  MPI_Comm               comm = PetscObjectComm((PetscObject)A);
  PetscBool              isbinary, samel, allow, isbaij;
  PetscInt               tr[6], M, N, nr, nc, Asize, isn;
  const PetscInt        *idx;
  PetscMPIInt            size;
  char                   lmattype[64];
  Mat                    dA, lA;
  IS                     is;

  PetscFunctionBegin;
  PetscCheckSameComm(A, 1, viewer, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Invalid viewer of type %s", ((PetscObject)viewer)->type_name);

  /* validate the header written by MatView_IS (see tr[] there) */
  PetscCall(PetscViewerBinaryRead(viewer, tr, PETSC_STATIC_ARRAY_LENGTH(tr), NULL, PETSC_INT));
  PetscCheck(tr[0] == MAT_FILE_CLASSID, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a matrix next in file");
  PetscCheck(tr[1] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a IS matrix next in file");
  PetscCheck(tr[2] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a IS matrix next in file");
  PetscCheck(tr[3] < 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a IS matrix next in file");
  PetscCheck(tr[4] == 0 || tr[4] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a IS matrix next in file");
  PetscCheck(tr[5] == 0 || tr[5] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a IS matrix next in file");
  M     = tr[1];
  N     = tr[2];
  Asize = -tr[3]; /* number of processes used when saving */
  samel = (PetscBool)tr[4];
  allow = (PetscBool)tr[5];
  PetscCall(PetscViewerBinaryRead(viewer, lmattype, sizeof(lmattype), NULL, PETSC_CHAR));

  /* if we are loading from a larger set of processes, allow repeated entries */
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
  if (Asize > size) allow = PETSC_TRUE;

  /* set global sizes if not set already */
  if (A->rmap->N < 0) A->rmap->N = M;
  if (A->cmap->N < 0) A->cmap->N = N;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  PetscCheck(M == A->rmap->N, comm, PETSC_ERR_ARG_SIZ, "Matrix rows should be %" PetscInt_FMT ", found %" PetscInt_FMT, M, A->rmap->N);
  PetscCheck(N == A->cmap->N, comm, PETSC_ERR_ARG_SIZ, "Matrix columns should be %" PetscInt_FMT ", found %" PetscInt_FMT, N, A->cmap->N);

  /* load l2g maps */
  PetscCall(ISLocalToGlobalMappingCreate(comm, 0, 0, NULL, PETSC_USE_POINTER, &rmap));
  PetscCall(ISLocalToGlobalMappingLoad(rmap, viewer));
  if (!samel) {
    PetscCall(ISLocalToGlobalMappingCreate(comm, 0, 0, NULL, PETSC_USE_POINTER, &cmap));
    PetscCall(ISLocalToGlobalMappingLoad(cmap, viewer));
  } else {
    PetscCall(PetscObjectReference((PetscObject)rmap));
    cmap = rmap;
  }

  /* load sizes of local matrices; each rank may receive several of the saved
     local blocks, so the sizes on this rank are summed */
  PetscCall(ISCreate(comm, &is));
  PetscCall(ISSetType(is, ISGENERAL));
  PetscCall(ISLoad(is, viewer));
  PetscCall(ISGetLocalSize(is, &isn));
  PetscCall(ISGetIndices(is, &idx));
  nr = 0;
  for (PetscInt i = 0; i < isn; i++) nr += idx[i];
  PetscCall(ISRestoreIndices(is, &idx));
  PetscCall(ISDestroy(&is));
  PetscCall(ISCreate(comm, &is));
  PetscCall(ISSetType(is, ISGENERAL));
  PetscCall(ISLoad(is, viewer));
  PetscCall(ISGetLocalSize(is, &isn));
  PetscCall(ISGetIndices(is, &idx));
  nc = 0;
  for (PetscInt i = 0; i < isn; i++) nc += idx[i];
  PetscCall(ISRestoreIndices(is, &idx));
  PetscCall(ISDestroy(&is));

  /* now load the unassembled operator */
  PetscCall(MatCreate(comm, &dA));
  PetscCall(MatSetType(dA, MATMPIAIJ));
  PetscCall(MatSetSizes(dA, nr, nc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatLoad(dA, viewer));
  /* keep only the diagonal (sequential) block: that is the local matrix */
  PetscCall(MatMPIAIJGetSeqAIJ(dA, &lA, NULL, NULL));
  PetscCall(PetscObjectReference((PetscObject)lA));
  PetscCall(MatDestroy(&dA));

  /* and convert to the desired format */
  PetscCall(PetscStrcmpAny(lmattype, &isbaij, MATSBAIJ, MATSEQSBAIJ, ""));
  if (isbaij) PetscCall(MatSetOption(lA, MAT_SYMMETRIC, PETSC_TRUE));
  PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));

  /* assemble the MATIS object */
  PetscCall(MatISSetAllowRepeated(A, allow));
  PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap));
  PetscCall(MatISSetLocalMat(A, lA));
  PetscCall(MatDestroy(&lA));
  PetscCall(ISLocalToGlobalMappingDestroy(&rmap));
  PetscCall(ISLocalToGlobalMappingDestroy(&cmap));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Inverts the (assembled) block diagonal: each rank inverts its local block
   diagonal, then the owned values are gathered via an SF reduction */
static PetscErrorCode MatInvertBlockDiagonal_IS(Mat mat, const PetscScalar **values)
{
  Mat_IS            *is = (Mat_IS *)mat->data;
  MPI_Datatype       nodeType;
  const PetscScalar *lv;
PetscInt    bs; /* block size of the outer matrix, pushed onto the local matrix */
  PetscMPIInt mbs;

  PetscFunctionBegin;
  PetscCall(MatGetBlockSize(mat, &bs));
  PetscCall(MatSetBlockSize(is->A, bs));
  PetscCall(MatInvertBlockDiagonal(is->A, &lv));
  if (!is->bdiag) PetscCall(PetscMalloc1(bs * mat->rmap->n, &is->bdiag));

  /* communicate with MatISSetUpSF_IS-created SF: one contiguous MPI datatype
     per block so each root receives its bs scalars in one shot */
  PetscCall(PetscMPIIntCast(bs, &mbs));
  PetscCallMPI(MPI_Type_contiguous(mbs, MPIU_SCALAR, &nodeType));
  PetscCallMPI(MPI_Type_commit(&nodeType));
  PetscCall(PetscSFReduceBegin(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
  PetscCall(PetscSFReduceEnd(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
  PetscCallMPI(MPI_Type_free(&nodeType));
  if (values) *values = is->bdiag;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* (Re)builds the global-to-local scatters (rctx/cctx), the local work vectors
   (x/y), and the interface multiplicity counter used by the Neumann-Neumann
   preconditioners; also decides whether MatInvertBlockDiagonal is supported */
static PetscErrorCode MatISSetUpScatters_Private(Mat A)
{
  Vec             cglobal, rglobal;
  IS              from;
  Mat_IS         *is = (Mat_IS *)A->data;
  PetscScalar     sum;
  const PetscInt *garray;
  PetscInt        nr, rbs, nc, cbs;
  VecType         rtype;

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
  PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
  PetscCall(VecDestroy(&is->x));
  PetscCall(VecDestroy(&is->y));
  PetscCall(VecDestroy(&is->counter));
  PetscCall(VecScatterDestroy(&is->rctx));
  PetscCall(VecScatterDestroy(&is->cctx));
  PetscCall(MatCreateVecs(is->A, &is->x, &is->y));
  PetscCall(VecBindToCPU(is->y, PETSC_TRUE));
  /* the outer matrix inherits the root vector type of the local vectors */
  PetscCall(VecGetRootType_Private(is->y, &rtype));
  PetscCall(PetscFree(A->defaultvectype));
  PetscCall(PetscStrallocpy(rtype, &A->defaultvectype));
  PetscCall(MatCreateVecs(A, &cglobal, &rglobal));
  PetscCall(VecBindToCPU(rglobal, PETSC_TRUE));
  /* row scatter: global vector -> local (overlapping) row space */
  PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &garray));
  PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), rbs, nr / rbs, garray, PETSC_USE_POINTER, &from));
  PetscCall(VecScatterCreate(rglobal, from, is->y, NULL, &is->rctx));
  PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &garray));
  PetscCall(ISDestroy(&from));
  if (is->rmapping != is->cmapping) {
    /* column scatter: only needed when the column map differs */
    PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &garray));
    PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), cbs, nc / cbs, garray, PETSC_USE_POINTER, &from));
    PetscCall(VecScatterCreate(cglobal, from, is->x, NULL, &is->cctx));
    PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &garray));
    PetscCall(ISDestroy(&from));
  } else {
    PetscCall(PetscObjectReference((PetscObject)is->rctx));
    is->cctx = is->rctx;
  }
  PetscCall(VecDestroy(&cglobal));

  /* interface counter vector (local): for each local dof, the number of
     subdomains sharing it (computed by an add-scatter of ones) */
  PetscCall(VecDuplicate(is->y, &is->counter));
  PetscCall(VecBindToCPU(is->counter, PETSC_TRUE));
  PetscCall(VecSet(is->y, 1.));
  PetscCall(VecScatterBegin(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecBindToCPU(is->y, PETSC_FALSE));
  PetscCall(VecBindToCPU(is->counter, PETSC_FALSE));

  /* special functions for block-diagonal matrices: sum == global row count means
     no dof is shared, i.e. the operator is block diagonal across subdomains */
  PetscCall(VecSum(rglobal, &sum));
  A->ops->invertblockdiagonal = NULL;
  if ((PetscInt)(PetscRealPart(sum)) == A->rmap->N && A->rmap->N == A->cmap->N && is->rmapping == is->cmapping) A->ops->invertblockdiagonal = MatInvertBlockDiagonal_IS;
  PetscCall(VecDestroy(&rglobal));

  /* setup SF for general purpose shared indices based communications */
  PetscCall(MatISSetUpSF_IS(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Filters an l2g map: drops negative entries and (unless allow_repeated) the
   repeated ones. Returns NULL maps when nothing needed filtering; otherwise
   returns the filtered global map (nmap) and a local map (lmap) from the
   original local numbering to the filtered one */
static PetscErrorCode MatISFilterL2GMap(Mat A, ISLocalToGlobalMapping map, ISLocalToGlobalMapping *nmap, ISLocalToGlobalMapping *lmap)
{
  Mat_IS                    *matis = (Mat_IS *)A->data;
  IS                         is;
  ISLocalToGlobalMappingType l2gtype;
  const PetscInt            *idxs;
  PetscHSetI                 ht;
  PetscInt                  *nidxs;
  PetscInt                   i, n, bs, c;
  PetscBool                  flg[] = {PETSC_FALSE, PETSC_FALSE}; /* [0]: negative found, [1]: repeated found */

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingGetSize(map, &n));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(map, &bs));
  PetscCall(ISLocalToGlobalMappingGetBlockIndices(map, &idxs));
  PetscCall(PetscHSetICreate(&ht));
  PetscCall(PetscMalloc1(n / bs, &nidxs));
  for (i = 0, c = 0; i < n / bs; i++) {
    PetscBool missing = PETSC_TRUE;
    if (idxs[i] < 0) {
      flg[0] = PETSC_TRUE;
      continue;
    }
    if (!matis->allow_repeated) PetscCall(PetscHSetIQueryAdd(ht, idxs[i], &missing));
    if (!missing) flg[1] = PETSC_TRUE;
    else nidxs[c++] = idxs[i];
  }
  PetscCall(PetscHSetIDestroy(&ht));
  /* the decision to filter must be collective */
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, flg, 2, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
  if (!flg[0] && !flg[1]) { /* Entries are all non negative and unique */
    *nmap = NULL;
    *lmap = NULL;
    PetscCall(PetscFree(nidxs));
    PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* New l2g map without negative indices (and repeated indices if not allowed) */
  PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, c, nidxs, PETSC_USE_POINTER, &is));
  PetscCall(ISLocalToGlobalMappingCreateIS(is, nmap));
  PetscCall(ISDestroy(&is));
  PetscCall(ISLocalToGlobalMappingGetType(map, &l2gtype));
  PetscCall(ISLocalToGlobalMappingSetType(*nmap, l2gtype));

  /* New local l2g map for repeated indices if not allowed */
PetscCall(ISGlobalToLocalMappingApplyBlock(*nmap, IS_GTOLM_MASK, n / bs, idxs, NULL, nidxs));
  PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, n / bs, nidxs, PETSC_USE_POINTER, &is));
  PetscCall(ISLocalToGlobalMappingCreateIS(is, lmap));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscFree(nidxs));
  PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Installs the row/column l2g maps, (re)creates the local matrix with the
   proper sizes, and sets up the scatters. NULL maps mean "local == global".
   Any previously set local matrix, scatters and SFs are discarded. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
{
  Mat_IS                *is            = (Mat_IS *)A->data;
  ISLocalToGlobalMapping localrmapping = NULL, localcmapping = NULL;
  PetscInt               nr, rbs, nc, cbs;
  PetscBool              cong, freem[] = {PETSC_FALSE, PETSC_FALSE};

  PetscFunctionBegin;
  if (rmapping) PetscCheckSameComm(A, 1, rmapping, 2);
  if (cmapping) PetscCheckSameComm(A, 1, cmapping, 3);

  PetscCall(ISLocalToGlobalMappingDestroy(&is->rmapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  PetscCall(MatHasCongruentLayouts(A, &cong));

  /* If NULL, local space matches global space */
  if (!rmapping) {
    IS is;

    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->N, 0, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rmapping));
    PetscCall(ISLocalToGlobalMappingSetBlockSize(rmapping, A->rmap->bs));
    PetscCall(ISDestroy(&is));
    freem[0] = PETSC_TRUE;
    if (!cmapping && cong && A->rmap->bs == A->cmap->bs) cmapping = rmapping;
  } else if (!is->islocalref) { /* check if the l2g map has negative or repeated entries */
    PetscCall(MatISFilterL2GMap(A, rmapping, &is->rmapping, &localrmapping));
    if (rmapping == cmapping) {
      PetscCall(PetscObjectReference((PetscObject)is->rmapping));
      is->cmapping = is->rmapping;
      PetscCall(PetscObjectReference((PetscObject)localrmapping));
      localcmapping = localrmapping;
    }
  }
  if (!cmapping) {
    IS is;

    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->N, 0, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cmapping));
    PetscCall(ISLocalToGlobalMappingSetBlockSize(cmapping, A->cmap->bs));
    PetscCall(ISDestroy(&is));
    freem[1] = PETSC_TRUE;
  } else if (cmapping != rmapping && !is->islocalref) { /* check if the l2g map has negative or repeated entries */
    PetscCall(MatISFilterL2GMap(A, cmapping, &is->cmapping, &localcmapping));
  }
  /* when filtering was a no-op, keep references to the user maps */
  if (!is->rmapping) {
    PetscCall(PetscObjectReference((PetscObject)rmapping));
    is->rmapping = rmapping;
  }
  if (!is->cmapping) {
    PetscCall(PetscObjectReference((PetscObject)cmapping));
    is->cmapping = cmapping;
  }

  /* Clean up */
  is->lnnzstate = 0;
  PetscCall(MatDestroy(&is->dA));
  PetscCall(MatDestroy(&is->assembledA));
  PetscCall(MatDestroy(&is->A));
  if (is->csf != is->sf) {
    PetscCall(PetscSFDestroy(&is->csf));
    PetscCall(PetscFree2(is->csf_rootdata, is->csf_leafdata));
  } else is->csf = NULL;
  PetscCall(PetscSFDestroy(&is->sf));
  PetscCall(PetscFree2(is->sf_rootdata, is->sf_leafdata));
  PetscCall(PetscFree(is->bdiag));

  /* check if the two mappings are actually the same for square matrices since MATIS has some optimization for this case
     (DOLFIN passes 2 different objects) */
  PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
  PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
  if (is->rmapping != is->cmapping && cong) {
    PetscBool same = PETSC_FALSE;
    if (nr == nc && cbs == rbs) {
      const PetscInt *idxs1, *idxs2;

      PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &idxs1));
      PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &idxs2));
      PetscCall(PetscArraycmp(idxs1, idxs2, nr / rbs, &same));
      PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &idxs1));
      PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &idxs2));
    }
    /* the aliasing decision must be collective */
    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &same, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
    if (same) {
      PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
      PetscCall(PetscObjectReference((PetscObject)is->rmapping));
      is->cmapping = is->rmapping;
    }
  }
  PetscCall(PetscLayoutSetBlockSize(A->rmap, rbs));
  PetscCall(PetscLayoutSetBlockSize(A->cmap, cbs));
  /* Pass the user defined maps to the layout */
  PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->rmap, rmapping));
  PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->cmap, cmapping));
  if (freem[0]) PetscCall(ISLocalToGlobalMappingDestroy(&rmapping));
  if (freem[1]) PetscCall(ISLocalToGlobalMappingDestroy(&cmapping));

  if (!is->islocalref) {
    /* Create the local matrix A */
    PetscCall(MatCreate(PETSC_COMM_SELF, &is->A));
    PetscCall(MatSetType(is->A, is->lmattype));
    PetscCall(MatSetSizes(is->A, nr, nc, nr, nc));
    PetscCall(MatSetBlockSizes(is->A, rbs, cbs));
    PetscCall(MatSetOptionsPrefix(is->A, "is_"));
    PetscCall(MatAppendOptionsPrefix(is->A, ((PetscObject)A)->prefix));
    PetscCall(PetscLayoutSetUp(is->A->rmap));
    PetscCall(PetscLayoutSetUp(is->A->cmap));
    PetscCall(MatSetLocalToGlobalMapping(is->A, localrmapping, localcmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&localrmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&localcmapping));
    /* setup scatters and local vectors for MatMult */
    PetscCall(MatISSetUpScatters_Private(A));
  }
  A->preallocated = PETSC_TRUE;
PetscFunctionReturn(PETSC_SUCCESS); 2843 } 2844 2845 static PetscErrorCode MatSetUp_IS(Mat A) 2846 { 2847 Mat_IS *is = (Mat_IS *)A->data; 2848 ISLocalToGlobalMapping rmap, cmap; 2849 2850 PetscFunctionBegin; 2851 if (!is->sf) { 2852 PetscCall(MatGetLocalToGlobalMapping(A, &rmap, &cmap)); 2853 PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap)); 2854 } 2855 PetscFunctionReturn(PETSC_SUCCESS); 2856 } 2857 2858 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv) 2859 { 2860 Mat_IS *is = (Mat_IS *)mat->data; 2861 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION]; 2862 2863 PetscFunctionBegin; 2864 PetscCall(ISGlobalToLocalMappingApply(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l)); 2865 if (m != n || rows != cols || is->cmapping != is->rmapping) { 2866 PetscCall(ISGlobalToLocalMappingApply(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l)); 2867 PetscCall(MatSetValues(is->A, m, rows_l, n, cols_l, values, addv)); 2868 } else { 2869 PetscCall(MatSetValues(is->A, m, rows_l, m, rows_l, values, addv)); 2870 } 2871 PetscFunctionReturn(PETSC_SUCCESS); 2872 } 2873 2874 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv) 2875 { 2876 Mat_IS *is = (Mat_IS *)mat->data; 2877 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION]; 2878 2879 PetscFunctionBegin; 2880 PetscCall(ISGlobalToLocalMappingApplyBlock(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l)); 2881 if (m != n || rows != cols || is->cmapping != is->rmapping) { 2882 PetscCall(ISGlobalToLocalMappingApplyBlock(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l)); 2883 PetscCall(MatSetValuesBlocked(is->A, m, rows_l, n, cols_l, values, addv)); 2884 } else { 2885 PetscCall(MatSetValuesBlocked(is->A, m, rows_l, m, rows_l, values, addv)); 2886 } 
2887 PetscFunctionReturn(PETSC_SUCCESS); 2888 } 2889 2890 static PetscErrorCode MatSetValuesLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv) 2891 { 2892 Mat_IS *is = (Mat_IS *)A->data; 2893 2894 PetscFunctionBegin; 2895 if (is->A->rmap->mapping || is->A->cmap->mapping) { 2896 PetscCall(MatSetValuesLocal(is->A, m, rows, n, cols, values, addv)); 2897 } else { 2898 PetscCall(MatSetValues(is->A, m, rows, n, cols, values, addv)); 2899 } 2900 PetscFunctionReturn(PETSC_SUCCESS); 2901 } 2902 2903 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv) 2904 { 2905 Mat_IS *is = (Mat_IS *)A->data; 2906 2907 PetscFunctionBegin; 2908 if (is->A->rmap->mapping || is->A->cmap->mapping) { 2909 PetscCall(MatSetValuesBlockedLocal(is->A, m, rows, n, cols, values, addv)); 2910 } else { 2911 PetscCall(MatSetValuesBlocked(is->A, m, rows, n, cols, values, addv)); 2912 } 2913 PetscFunctionReturn(PETSC_SUCCESS); 2914 } 2915 2916 static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, PetscBool columns) 2917 { 2918 Mat_IS *is = (Mat_IS *)A->data; 2919 2920 PetscFunctionBegin; 2921 if (!n) { 2922 is->pure_neumann = PETSC_TRUE; 2923 } else { 2924 PetscInt i; 2925 is->pure_neumann = PETSC_FALSE; 2926 2927 if (columns) { 2928 PetscCall(MatZeroRowsColumns(is->A, n, rows, diag, NULL, NULL)); 2929 } else { 2930 PetscCall(MatZeroRows(is->A, n, rows, diag, NULL, NULL)); 2931 } 2932 if (diag != 0.) 
{
      const PetscScalar *array;
      PetscCall(VecGetArrayRead(is->counter, &array));
      /* counter[i] = number of subdomains sharing local dof i; dividing keeps
         the assembled diagonal equal to diag after the add-scatter */
      for (i = 0; i < n; i++) PetscCall(MatSetValue(is->A, rows[i], rows[i], diag / (array[rows[i]]), INSERT_VALUES));
      PetscCall(VecRestoreArrayRead(is->counter, &array));
    }
    PetscCall(MatAssemblyBegin(is->A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(is->A, MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Shared implementation of MatZeroRows/MatZeroRowsColumns for MATIS: maps the
   global rows to locally owned ones, optionally fixes the rhs, broadcasts the
   to-be-zeroed flags to every subdomain via the SF, then zeroes locally */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b, PetscBool columns)
{
  Mat_IS   *matis = (Mat_IS *)A->data;
  PetscInt  nr, nl, len, i;
  PetscInt *lrows;

  PetscFunctionBegin;
  /* debug-only sanity checks: these features require congruent layouts and a
     single shared l2g map */
  if (PetscUnlikelyDebug(columns || diag != 0. || (x && b))) {
    PetscBool cong;

    PetscCall(PetscLayoutCompare(A->rmap, A->cmap, &cong));
    cong = (PetscBool)(cong && matis->sf == matis->csf);
    PetscCheck(cong || !columns, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Columns can be zeroed if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
    PetscCheck(cong || diag == 0., PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
    PetscCheck(cong || !x || !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "A->rmap and A->cmap need to be congruent, and the l2g maps be the same");
  }
  /* get locally owned rows */
  PetscCall(PetscLayoutMapLocal(A->rmap, n, rows, &len, &lrows, NULL));
  /* fix right-hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    PetscCall(VecGetArrayRead(x, &xx));
    PetscCall(VecGetArray(b, &bb));
    for (i = 0; i < len; ++i) bb[lrows[i]] = diag * xx[lrows[i]];
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCall(VecRestoreArray(b, &bb));
  }
  /* get rows associated to the local matrices: mark owned rows at the roots,
     broadcast the marks to all leaves (subdomain copies) */
  PetscCall(MatGetSize(matis->A, &nl, NULL));
  PetscCall(PetscArrayzero(matis->sf_leafdata, nl));
  PetscCall(PetscArrayzero(matis->sf_rootdata, A->rmap->n));
  for (i = 0; i < len; i++) matis->sf_rootdata[lrows[i]] = 1;
  PetscCall(PetscFree(lrows));
  PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
  PetscCall(PetscMalloc1(nl, &lrows));
  for (i = 0, nr = 0; i < nl; i++)
    if (matis->sf_leafdata[i]) lrows[nr++] = i;
  PetscCall(MatISZeroRowsColumnsLocal_Private(A, nr, lrows, diag, columns));
  PetscCall(PetscFree(lrows));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MatZeroRows implementation: thin wrapper over the shared private routine */
static PetscErrorCode MatZeroRows_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MatZeroRowsColumns implementation: thin wrapper over the shared private routine */
static PetscErrorCode MatZeroRowsColumns_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Assembly just forwards to the local matrix */
static PetscErrorCode MatAssemblyBegin_IS(Mat A, MatAssemblyType type)
{
  Mat_IS *is = (Mat_IS *)A->data;

  PetscFunctionBegin;
  PetscCall(MatAssemblyBegin(is->A, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Finish assembly of the local matrix, optionally compress away empty local
   rows/columns (locempty), and keep the global nonzerostate consistent */
static PetscErrorCode MatAssemblyEnd_IS(Mat A, MatAssemblyType type)
{
  Mat_IS   *is = (Mat_IS *)A->data;
  PetscBool lnnz;

  PetscFunctionBegin;
  PetscCall(MatAssemblyEnd(is->A, type));
  /* fix for local empty
rows/cols */
  if (is->locempty && type == MAT_FINAL_ASSEMBLY) {
    Mat                    newlA;
    ISLocalToGlobalMapping rl2g, cl2g;
    IS                     nzr, nzc;
    PetscInt               nr, nc, nnzr, nnzc;
    PetscBool              lnewl2g, newl2g;

    PetscCall(MatGetSize(is->A, &nr, &nc));
    PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_FALSE, PETSC_SMALL, &nzr));
    if (!nzr) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nr, 0, 1, &nzr));
    PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_TRUE, PETSC_SMALL, &nzc));
    if (!nzc) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nc, 0, 1, &nzc));
    PetscCall(ISGetSize(nzr, &nnzr));
    PetscCall(ISGetSize(nzc, &nnzc));
    /* both branches perform the same collective reduction, so every rank
       participates regardless of which branch it takes */
    if (nnzr != nr || nnzc != nc) { /* need new global l2g map */
      lnewl2g = PETSC_TRUE;
      PetscCallMPI(MPIU_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));

      /* extract valid submatrix */
      PetscCall(MatCreateSubMatrix(is->A, nzr, nzc, MAT_INITIAL_MATRIX, &newlA));
    } else { /* local matrix fully populated */
      lnewl2g = PETSC_FALSE;
      PetscCallMPI(MPIU_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
      PetscCall(PetscObjectReference((PetscObject)is->A));
      newlA = is->A;
    }

    /* attach new global l2g map if needed: empty rows/cols are masked with -1 */
    if (newl2g) {
      IS              zr, zc;
      const PetscInt *ridxs, *cidxs, *zridxs, *zcidxs;
      PetscInt       *nidxs, i;

      PetscCall(ISComplement(nzr, 0, nr, &zr));
      PetscCall(ISComplement(nzc, 0, nc, &zc));
      PetscCall(PetscMalloc1(PetscMax(nr, nc), &nidxs));
      PetscCall(ISLocalToGlobalMappingGetIndices(is->rmapping, &ridxs));
      PetscCall(ISLocalToGlobalMappingGetIndices(is->cmapping, &cidxs));
      PetscCall(ISGetIndices(zr, &zridxs));
      PetscCall(ISGetIndices(zc, &zcidxs));
      PetscCall(ISGetLocalSize(zr, &nnzr));
      PetscCall(ISGetLocalSize(zc, &nnzc));

      PetscCall(PetscArraycpy(nidxs, ridxs, nr));
      for (i = 0; i < nnzr; i++) nidxs[zridxs[i]] = -1;
      PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nr, nidxs, PETSC_COPY_VALUES, &rl2g));
      PetscCall(PetscArraycpy(nidxs, cidxs, nc));
      for (i = 0; i < nnzc; i++) nidxs[zcidxs[i]] = -1;
      PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nc, nidxs, PETSC_COPY_VALUES, &cl2g));

      PetscCall(ISRestoreIndices(zr, &zridxs));
      PetscCall(ISRestoreIndices(zc, &zcidxs));
      PetscCall(ISLocalToGlobalMappingRestoreIndices(is->rmapping, &ridxs));
      PetscCall(ISLocalToGlobalMappingRestoreIndices(is->cmapping, &cidxs));
      PetscCall(ISDestroy(&nzr));
      PetscCall(ISDestroy(&nzc));
      PetscCall(ISDestroy(&zr));
      PetscCall(ISDestroy(&zc));
      PetscCall(PetscFree(nidxs));
      PetscCall(MatSetLocalToGlobalMapping(A, rl2g, cl2g));
      PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
      PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
    }
    PetscCall(MatISSetLocalMat(A, newlA));
    PetscCall(MatDestroy(&newlA));
    PetscCall(ISDestroy(&nzr));
    PetscCall(ISDestroy(&nzc));
    is->locempty = PETSC_FALSE;
  }
  /* bump the global nonzerostate when any rank's local nonzero pattern changed */
  lnnz          = (PetscBool)(is->A->nonzerostate == is->lnnzstate);
  is->lnnzstate = is->A->nonzerostate;
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &lnnz, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  if (!lnnz) A->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hand out a borrowed reference to the local matrix (no refcount bump) */
static PetscErrorCode MatISGetLocalMat_IS(Mat mat, Mat *local)
{
  Mat_IS *is = (Mat_IS *)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Invalidate the caller's borrowed pointer; nothing to release */
static PetscErrorCode MatISRestoreLocalMat_IS(Mat mat, Mat *local)
{
  PetscFunctionBegin;
  *local = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatISGetLocalMat - Gets the local matrix stored inside a `MATIS` matrix.

  Not Collective.

  Input Parameter:
. mat - the matrix

  Output Parameter:
. local - the local matrix

  Level: intermediate

  Notes:
  This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the `MatSetValues()` operation.

  Call `MatISRestoreLocalMat()` when finished with the local matrix.

.seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISRestoreLocalMat()`
@*/
PetscErrorCode MatISGetLocalMat(Mat mat, Mat *local)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscAssertPointer(local, 2);
  PetscUseMethod(mat, "MatISGetLocalMat_C", (Mat, Mat *), (mat, local));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatISRestoreLocalMat - Restores the local matrix obtained with `MatISGetLocalMat()`

  Not Collective.

  Input Parameters:
+ mat   - the matrix
- local - the local matrix

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISGetLocalMat()`
@*/
PetscErrorCode MatISRestoreLocalMat(Mat mat, Mat *local)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscAssertPointer(local, 2);
  PetscUseMethod(mat, "MatISRestoreLocalMat_C", (Mat, Mat *), (mat, local));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Record the requested local matrix type and, if a local matrix already
   exists, convert it in place */
static PetscErrorCode MatISSetLocalMatType_IS(Mat mat, MatType mtype)
{
  Mat_IS *is = (Mat_IS *)mat->data;

  PetscFunctionBegin;
  if (is->A) PetscCall(MatSetType(is->A, mtype));
  PetscCall(PetscFree(is->lmattype));
  PetscCall(PetscStrallocpy(mtype, &is->lmattype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatISSetLocalMatType - Specifies the type of local matrix inside the `MATIS`

  Logically Collective.
3182 3183 Input Parameters: 3184 + mat - the matrix 3185 - mtype - the local matrix type 3186 3187 Level: intermediate 3188 3189 .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetType()`, `MatType` 3190 @*/ 3191 PetscErrorCode MatISSetLocalMatType(Mat mat, MatType mtype) 3192 { 3193 PetscFunctionBegin; 3194 PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); 3195 PetscUseMethod(mat, "MatISSetLocalMatType_C", (Mat, MatType), (mat, mtype)); 3196 PetscFunctionReturn(PETSC_SUCCESS); 3197 } 3198 3199 static PetscErrorCode MatISSetLocalMat_IS(Mat mat, Mat local) 3200 { 3201 Mat_IS *is = (Mat_IS *)mat->data; 3202 PetscInt nrows, ncols, orows, ocols; 3203 MatType mtype, otype; 3204 PetscBool sametype = PETSC_TRUE; 3205 3206 PetscFunctionBegin; 3207 if (is->A && !is->islocalref) { 3208 PetscCall(MatGetSize(is->A, &orows, &ocols)); 3209 PetscCall(MatGetSize(local, &nrows, &ncols)); 3210 PetscCheck(orows == nrows && ocols == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local MATIS matrix should be of size %" PetscInt_FMT "x%" PetscInt_FMT " (passed a %" PetscInt_FMT "x%" PetscInt_FMT " matrix)", orows, ocols, nrows, ncols); 3211 PetscCall(MatGetType(local, &mtype)); 3212 PetscCall(MatGetType(is->A, &otype)); 3213 PetscCall(PetscStrcmp(mtype, otype, &sametype)); 3214 } 3215 PetscCall(PetscObjectReference((PetscObject)local)); 3216 PetscCall(MatDestroy(&is->A)); 3217 is->A = local; 3218 PetscCall(MatGetType(is->A, &mtype)); 3219 PetscCall(MatISSetLocalMatType(mat, mtype)); 3220 if (!sametype && !is->islocalref) PetscCall(MatISSetUpScatters_Private(mat)); 3221 is->lnnzstate = 0; 3222 PetscFunctionReturn(PETSC_SUCCESS); 3223 } 3224 3225 /*@ 3226 MatISSetLocalMat - Replace the local matrix stored inside a `MATIS` object. 
3227 3228 Not Collective 3229 3230 Input Parameters: 3231 + mat - the matrix 3232 - local - the local matrix 3233 3234 Level: intermediate 3235 3236 .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISSetLocalMatType`, `MatISGetLocalMat()` 3237 @*/ 3238 PetscErrorCode MatISSetLocalMat(Mat mat, Mat local) 3239 { 3240 PetscFunctionBegin; 3241 PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); 3242 PetscValidHeaderSpecific(local, MAT_CLASSID, 2); 3243 PetscUseMethod(mat, "MatISSetLocalMat_C", (Mat, Mat), (mat, local)); 3244 PetscFunctionReturn(PETSC_SUCCESS); 3245 } 3246 3247 static PetscErrorCode MatZeroEntries_IS(Mat A) 3248 { 3249 Mat_IS *a = (Mat_IS *)A->data; 3250 3251 PetscFunctionBegin; 3252 PetscCall(MatZeroEntries(a->A)); 3253 PetscFunctionReturn(PETSC_SUCCESS); 3254 } 3255 3256 static PetscErrorCode MatScale_IS(Mat A, PetscScalar a) 3257 { 3258 Mat_IS *is = (Mat_IS *)A->data; 3259 3260 PetscFunctionBegin; 3261 PetscCall(MatScale(is->A, a)); 3262 PetscFunctionReturn(PETSC_SUCCESS); 3263 } 3264 3265 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v) 3266 { 3267 Mat_IS *is = (Mat_IS *)A->data; 3268 3269 PetscFunctionBegin; 3270 /* get diagonal of the local matrix */ 3271 PetscCall(MatGetDiagonal(is->A, is->y)); 3272 3273 /* scatter diagonal back into global vector */ 3274 PetscCall(VecSet(v, 0)); 3275 PetscCall(VecScatterBegin(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE)); 3276 PetscCall(VecScatterEnd(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE)); 3277 PetscFunctionReturn(PETSC_SUCCESS); 3278 } 3279 3280 static PetscErrorCode MatSetOption_IS(Mat A, MatOption op, PetscBool flg) 3281 { 3282 Mat_IS *a = (Mat_IS *)A->data; 3283 3284 PetscFunctionBegin; 3285 PetscCall(MatSetOption(a->A, op, flg)); 3286 PetscFunctionReturn(PETSC_SUCCESS); 3287 } 3288 3289 static PetscErrorCode MatAXPY_IS(Mat Y, PetscScalar a, Mat X, MatStructure str) 3290 { 3291 Mat_IS *y = (Mat_IS *)Y->data; 3292 Mat_IS *x; 3293 3294 PetscFunctionBegin; 3295 if (PetscDefined(USE_DEBUG)) { 3296 
PetscBool ismatis; 3297 PetscCall(PetscObjectTypeCompare((PetscObject)X, MATIS, &ismatis)); 3298 PetscCheck(ismatis, PetscObjectComm((PetscObject)Y), PETSC_ERR_SUP, "Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS"); 3299 } 3300 x = (Mat_IS *)X->data; 3301 PetscCall(MatAXPY(y->A, a, x->A, str)); 3302 PetscFunctionReturn(PETSC_SUCCESS); 3303 } 3304 3305 static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A, IS row, IS col, Mat *submat) 3306 { 3307 Mat lA; 3308 Mat_IS *matis = (Mat_IS *)A->data; 3309 ISLocalToGlobalMapping rl2g, cl2g; 3310 IS is; 3311 const PetscInt *rg, *rl; 3312 PetscInt nrg; 3313 PetscInt N, M, nrl, i, *idxs; 3314 3315 PetscFunctionBegin; 3316 PetscCall(ISLocalToGlobalMappingGetIndices(A->rmap->mapping, &rg)); 3317 PetscCall(ISGetLocalSize(row, &nrl)); 3318 PetscCall(ISGetIndices(row, &rl)); 3319 PetscCall(ISLocalToGlobalMappingGetSize(A->rmap->mapping, &nrg)); 3320 if (PetscDefined(USE_DEBUG)) { 3321 for (i = 0; i < nrl; i++) PetscCheck(rl[i] < nrg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local row index %" PetscInt_FMT " -> %" PetscInt_FMT " greater than maximum possible %" PetscInt_FMT, i, rl[i], nrg); 3322 } 3323 PetscCall(PetscMalloc1(nrg, &idxs)); 3324 /* map from [0,nrl) to row */ 3325 for (i = 0; i < nrl; i++) idxs[i] = rl[i]; 3326 for (i = nrl; i < nrg; i++) idxs[i] = -1; 3327 PetscCall(ISRestoreIndices(row, &rl)); 3328 PetscCall(ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping, &rg)); 3329 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), nrg, idxs, PETSC_OWN_POINTER, &is)); 3330 PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g)); 3331 PetscCall(ISDestroy(&is)); 3332 /* compute new l2g map for columns */ 3333 if (col != row || matis->rmapping != matis->cmapping || matis->A->rmap->mapping != matis->A->cmap->mapping) { 3334 const PetscInt *cg, *cl; 3335 PetscInt ncg; 3336 PetscInt ncl; 3337 3338 PetscCall(ISLocalToGlobalMappingGetIndices(A->cmap->mapping, &cg)); 3339 PetscCall(ISGetLocalSize(col, &ncl)); 3340 
PetscCall(ISGetIndices(col, &cl)); 3341 PetscCall(ISLocalToGlobalMappingGetSize(A->cmap->mapping, &ncg)); 3342 if (PetscDefined(USE_DEBUG)) { 3343 for (i = 0; i < ncl; i++) PetscCheck(cl[i] < ncg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local column index %" PetscInt_FMT " -> %" PetscInt_FMT " greater than maximum possible %" PetscInt_FMT, i, cl[i], ncg); 3344 } 3345 PetscCall(PetscMalloc1(ncg, &idxs)); 3346 /* map from [0,ncl) to col */ 3347 for (i = 0; i < ncl; i++) idxs[i] = cl[i]; 3348 for (i = ncl; i < ncg; i++) idxs[i] = -1; 3349 PetscCall(ISRestoreIndices(col, &cl)); 3350 PetscCall(ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping, &cg)); 3351 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), ncg, idxs, PETSC_OWN_POINTER, &is)); 3352 PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g)); 3353 PetscCall(ISDestroy(&is)); 3354 } else { 3355 PetscCall(PetscObjectReference((PetscObject)rl2g)); 3356 cl2g = rl2g; 3357 } 3358 /* create the MATIS submatrix */ 3359 PetscCall(MatGetSize(A, &M, &N)); 3360 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), submat)); 3361 PetscCall(MatSetSizes(*submat, PETSC_DECIDE, PETSC_DECIDE, M, N)); 3362 PetscCall(MatSetType(*submat, MATIS)); 3363 matis = (Mat_IS *)((*submat)->data); 3364 matis->islocalref = PETSC_TRUE; 3365 PetscCall(MatSetLocalToGlobalMapping(*submat, rl2g, cl2g)); 3366 PetscCall(MatISGetLocalMat(A, &lA)); 3367 PetscCall(MatISSetLocalMat(*submat, lA)); 3368 PetscCall(ISLocalToGlobalMappingDestroy(&rl2g)); 3369 PetscCall(ISLocalToGlobalMappingDestroy(&cl2g)); 3370 3371 /* remove unsupported ops */ 3372 PetscCall(PetscMemzero((*submat)->ops, sizeof(struct _MatOps))); 3373 (*submat)->ops->destroy = MatDestroy_IS; 3374 (*submat)->ops->setvalueslocal = MatSetValuesLocal_SubMat_IS; 3375 (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS; 3376 (*submat)->ops->getlocalsubmatrix = MatGetLocalSubMatrix_IS; 3377 PetscFunctionReturn(PETSC_SUCCESS); 3378 } 3379 3380 static PetscErrorCode 
MatSetFromOptions_IS(Mat A, PetscOptionItems PetscOptionsObject) 3381 { 3382 Mat_IS *a = (Mat_IS *)A->data; 3383 char type[256]; 3384 PetscBool flg; 3385 3386 PetscFunctionBegin; 3387 PetscOptionsHeadBegin(PetscOptionsObject, "MATIS options"); 3388 PetscCall(PetscOptionsDeprecated("-matis_keepassembled", "-mat_is_keepassembled", "3.21", NULL)); 3389 PetscCall(PetscOptionsDeprecated("-matis_fixempty", "-mat_is_fixempty", "3.21", NULL)); 3390 PetscCall(PetscOptionsDeprecated("-matis_storel2l", "-mat_is_storel2l", "3.21", NULL)); 3391 PetscCall(PetscOptionsDeprecated("-matis_localmat_type", "-mat_is_localmat_type", "3.21", NULL)); 3392 PetscCall(PetscOptionsBool("-mat_is_keepassembled", "Store an assembled version if needed", NULL, a->keepassembled, &a->keepassembled, NULL)); 3393 PetscCall(PetscOptionsBool("-mat_is_fixempty", "Fix local matrices in case of empty local rows/columns", "MatISFixLocalEmpty", a->locempty, &a->locempty, NULL)); 3394 PetscCall(PetscOptionsBool("-mat_is_storel2l", "Store local-to-local matrices generated from PtAP operations", "MatISStoreL2L", a->storel2l, &a->storel2l, NULL)); 3395 PetscCall(PetscOptionsBool("-mat_is_allow_repeated", "Allow local repeated entries", "MatISSetAllowRepeated", a->allow_repeated, &a->allow_repeated, NULL)); 3396 PetscCall(PetscOptionsFList("-mat_is_localmat_type", "Matrix type", "MatISSetLocalMatType", MatList, a->lmattype, type, 256, &flg)); 3397 if (flg) PetscCall(MatISSetLocalMatType(A, type)); 3398 if (a->A) PetscCall(MatSetFromOptions(a->A)); 3399 PetscOptionsHeadEnd(); 3400 PetscFunctionReturn(PETSC_SUCCESS); 3401 } 3402 3403 /*@ 3404 MatCreateIS - Creates a "process" unassembled matrix. 3405 3406 Collective. 3407 3408 Input Parameters: 3409 + comm - MPI communicator that will share the matrix 3410 . bs - block size of the matrix 3411 . m - local size of left vector used in matrix vector products 3412 . n - local size of right vector used in matrix vector products 3413 . 
M - global size of left vector used in matrix vector products
. N - global size of right vector used in matrix vector products
. rmap - local to global map for rows
- cmap - local to global map for cols

  Output Parameter:
. A - the resulting matrix

  Level: intermediate

  Notes:
  `m` and `n` are NOT related to the size of the map; they represent the size of the local parts of the distributed vectors
  used in `MatMult()` operations. The local sizes of `rmap` and `cmap` define the size of the local matrices.

  If `rmap` (`cmap`) is `NULL`, then the local row (column) spaces matches the global space.

.seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetLocalToGlobalMapping()`
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, ISLocalToGlobalMapping rmap, ISLocalToGlobalMapping cmap, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, n, M, N));
  if (bs > 0) PetscCall(MatSetBlockSize(*A, bs));
  PetscCall(MatSetType(*A, MATIS));
  PetscCall(MatSetLocalToGlobalMapping(*A, rmap, cmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Report whether an operation is supported: it must be present in A's function
   table, and (unless listed in tobefiltered) also supported by the local matrix */
static PetscErrorCode MatHasOperation_IS(Mat A, MatOperation op, PetscBool *has)
{
  Mat_IS      *a              = (Mat_IS *)A->data;
  MatOperation tobefiltered[] = {MATOP_MULT_ADD, MATOP_MULT_TRANSPOSE_ADD, MATOP_GET_DIAGONAL_BLOCK, MATOP_INCREASE_OVERLAP};

  PetscFunctionBegin;
  *has = PETSC_FALSE;
  if (!((void **)A->ops)[op] || !a->A) PetscFunctionReturn(PETSC_SUCCESS);
  *has = PETSC_TRUE;
  /* these ops are implemented at the MATIS level regardless of the local matrix */
  for (PetscInt i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(tobefiltered); i++)
    if (op == tobefiltered[i]) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(MatHasOperation(a->A, op, has));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* COO values are set on the local matrix; indices were localized at preallocation time */
static PetscErrorCode MatSetValuesCOO_IS(Mat A, const PetscScalar v[], InsertMode imode)
{
  Mat_IS *a = (Mat_IS *)A->data;

  PetscFunctionBegin;
  PetscCall(MatSetValuesCOO(a->A, v, imode));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Local-index COO preallocation, forwarded to the local matrix */
static PetscErrorCode MatSetPreallocationCOOLocal_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
{
  Mat_IS *a = (Mat_IS *)A->data;

  PetscFunctionBegin;
  PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
  if (a->A->rmap->mapping || a->A->cmap->mapping) {
    PetscCall(MatSetPreallocationCOOLocal(a->A, ncoo, coo_i, coo_j));
  } else {
    PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
  A->preallocated = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Global-index COO preallocation: indices are mapped to local numbering in place
   (IS_GTOLM_MASK turns indices not in the map into -1), then forwarded */
static PetscErrorCode MatSetPreallocationCOO_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
{
  Mat_IS  *a = (Mat_IS *)A->data;
  PetscInt ncoo_i;

  PetscFunctionBegin;
  PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
  PetscCall(PetscIntCast(ncoo, &ncoo_i));
  PetscCall(ISGlobalToLocalMappingApply(a->rmapping, IS_GTOLM_MASK, ncoo_i, coo_i, NULL, coo_i));
  PetscCall(ISGlobalToLocalMappingApply(a->cmapping, IS_GTOLM_MASK, ncoo_i, coo_j, NULL, coo_j));
  PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
  A->preallocated = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return (a reference to) an assembled AIJ/BAIJ version of A, rebuilding it lazily:
   - a change in nonzerostate destroys the cached copy,
   - a change in object state triggers a (possibly in-place) MatConvert.
   The cached matrix is dropped on return unless a->keepassembled is set. */
static PetscErrorCode MatISGetAssembled_Private(Mat A, Mat *tA)
{
  Mat_IS          *a = (Mat_IS *)A->data;
  PetscObjectState Astate, aAstate = PETSC_INT_MIN;
  PetscObjectState Annzstate, aAnnzstate = PETSC_INT_MIN;

  PetscFunctionBegin;
  PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
  Annzstate = A->nonzerostate;
  if (a->assembledA) {
    PetscCall(PetscObjectStateGet((PetscObject)a->assembledA, &aAstate));
    aAnnzstate = a->assembledA->nonzerostate;
  }
  if (aAnnzstate != Annzstate) PetscCall(MatDestroy(&a->assembledA));
  if (Astate != aAstate || !a->assembledA) {
    MatType     aAtype;
    PetscMPIInt size;
    PetscInt    rbs, cbs, bs;

    /* the assembled form is used as temporary storage for parallel operations
       like createsubmatrices and the like, do not waste device memory */
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(a->cmapping, &cbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(a->rmapping, &rbs));
    bs = rbs == cbs ? rbs : 1;
    if (a->assembledA) PetscCall(MatGetType(a->assembledA, &aAtype));
    else if (size > 1) aAtype = bs > 1 ? MATMPIBAIJ : MATMPIAIJ;
    else aAtype = bs > 1 ? MATSEQBAIJ : MATSEQAIJ;

    PetscCall(MatConvert(A, aAtype, a->assembledA ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &a->assembledA));
    PetscCall(PetscObjectStateSet((PetscObject)a->assembledA, Astate));
    a->assembledA->nonzerostate = Annzstate;
  }
  /* hand out a reference; drop the cache if we are not asked to keep it */
  PetscCall(PetscObjectReference((PetscObject)a->assembledA));
  *tA = a->assembledA;
  if (!a->keepassembled) PetscCall(MatDestroy(&a->assembledA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release the reference obtained with MatISGetAssembled_Private() */
static PetscErrorCode MatISRestoreAssembled_Private(Mat A, Mat *tA)
{
  PetscFunctionBegin;
  PetscCall(MatDestroy(tA));
  *tA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Diagonal block of the assembled operator, cached by object state and converted
   to the same type as the local matrix */
static PetscErrorCode MatGetDiagonalBlock_IS(Mat A, Mat *dA)
{
  Mat_IS          *a = (Mat_IS *)A->data;
  PetscObjectState Astate, dAstate = PETSC_INT_MIN;

  PetscFunctionBegin;
  PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
  if (a->dA) PetscCall(PetscObjectStateGet((PetscObject)a->dA, &dAstate));
  if (Astate != dAstate) {
    Mat     tA;
    MatType ltype;

    PetscCall(MatDestroy(&a->dA));
    PetscCall(MatISGetAssembled_Private(A, &tA));
    PetscCall(MatGetDiagonalBlock(tA, &a->dA));
    PetscCall(MatPropagateSymmetryOptions(tA, a->dA));
    PetscCall(MatGetType(a->A, &ltype));
    PetscCall(MatConvert(a->dA, ltype, MAT_INPLACE_MATRIX, &a->dA));
    PetscCall(PetscObjectReference((PetscObject)a->dA)); /* keep the block alive after tA is released */
    PetscCall(MatISRestoreAssembled_Private(A, &tA));
    PetscCall(PetscObjectStateSet((PetscObject)a->dA, Astate));
  }
  *dA = a->dA;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Submatrix extraction is delegated to the assembled form of A */
static PetscErrorCode MatCreateSubMatrices_IS(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse reuse, Mat *submat[])
{
  Mat tA;

  PetscFunctionBegin;
  PetscCall(MatISGetAssembled_Private(A, &tA));
  PetscCall(MatCreateSubMatrices(tA, n, irow, icol, reuse, submat));
/* MatCreateSubMatrices_MPIAIJ is a mess at the moment */
#if 0
  {
    Mat_IS *a = (Mat_IS*)A->data;
    MatType ltype;
    VecType vtype;
    char *flg;

    PetscCall(MatGetType(a->A,&ltype));
    PetscCall(MatGetVecType(a->A,&vtype));
    PetscCall(PetscStrstr(vtype,"cuda",&flg));
    if (!flg) PetscCall(PetscStrstr(vtype,"hip",&flg));
    if (!flg) PetscCall(PetscStrstr(vtype,"kokkos",&flg));
    if (flg) {
      for (PetscInt i = 0; i < n; i++) {
        Mat sA = (*submat)[i];

        PetscCall(MatConvert(sA,ltype,MAT_INPLACE_MATRIX,&sA));
        (*submat)[i] = sA;
      }
    }
  }
#endif
  PetscCall(MatISRestoreAssembled_Private(A, &tA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Overlap increase is delegated to the assembled form of A */
static PetscErrorCode MatIncreaseOverlap_IS(Mat A, PetscInt n, IS is[], PetscInt ov)
{
  Mat tA;

  PetscFunctionBegin;
  PetscCall(MatISGetAssembled_Private(A, &tA));
  PetscCall(MatIncreaseOverlap(tA, n, is, ov));
  PetscCall(MatISRestoreAssembled_Private(A, &tA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatISGetLocalToGlobalMapping - Gets the local-to-global numbering of the `MATIS` object

  Not Collective

  Input Parameter:
. A - the matrix

  Output Parameters:
+ rmapping - row mapping
- cmapping - column mapping

  Level: advanced

  Note:
  The returned map can be different from the one used to construct the `MATIS` object, since it will not contain negative or repeated indices.

.seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetLocalToGlobalMapping()`
@*/
PetscErrorCode MatISGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  if (rmapping) PetscAssertPointer(rmapping, 2);
  if (cmapping) PetscAssertPointer(cmapping, 3);
  PetscUseMethod(A, "MatISGetLocalToGlobalMapping_C", (Mat, ISLocalToGlobalMapping *, ISLocalToGlobalMapping *), (A, rmapping, cmapping));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return borrowed references to the internal (sanitized) l2g maps; either output may be NULL */
static PetscErrorCode MatISGetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping *r, ISLocalToGlobalMapping *c)
{
  Mat_IS *a = (Mat_IS *)A->data;

  PetscFunctionBegin;
  if (r) *r = a->rmapping;
  if (c) *c = a->cmapping;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Block sizes are forwarded to the local matrix */
static PetscErrorCode MatSetBlockSizes_IS(Mat A, PetscInt rbs, PetscInt cbs)
{
  Mat_IS *a = (Mat_IS *)A->data;

  PetscFunctionBegin;
  if (a->A) PetscCall(MatSetBlockSizes(a->A, rbs, cbs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used for non-overlapping domain decomposition methods (e.g. `PCBDDC` or `KSPFETIDP`).
   This stores the matrices in globally unassembled form and the parallel matrix vector product is handled "implicitly".

   Options Database Keys:
+  -mat_type is - Set the matrix type to `MATIS`.
.  -mat_is_allow_repeated - Allow repeated entries in the local part of the local to global maps.
.  -mat_is_fixempty - Fix local matrices in case of empty local rows/columns.
-  -mat_is_storel2l - Store the local-to-local operators generated by the Galerkin process of `MatPtAP()`.

   Level: intermediate

   Notes:
   Options prefix for the inner matrix are given by `-is_mat_xxx`

   You must call `MatSetLocalToGlobalMapping()` before using this matrix type.

   You can do matrix preallocation on the local matrix after you obtain it with
   `MatISGetLocalMat()`; otherwise, you could use `MatISSetPreallocation()` or `MatXAIJSetPreallocation()`

.seealso: [](ch_matrices), `Mat`, `MATIS`, `Mat`, `MatISGetLocalMat()`, `MatSetLocalToGlobalMapping()`, `MatISSetPreallocation()`, `MatCreateIS()`, `PCBDDC`, `KSPFETIDP`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  Mat_IS *a;

  PetscFunctionBegin;
  PetscCall(PetscNew(&a));
  /* local matrices default to MATAIJ until MatISSetLocalMatType() is called */
  PetscCall(PetscStrallocpy(MATAIJ, &a->lmattype));
  A->data = (void *)a;

  /* matrix ops */
  PetscCall(PetscMemzero(A->ops, sizeof(struct _MatOps)));
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->load                    = MatLoad_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->isstructurallysymmetric = MatIsStructurallySymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->createsubmatrix         = MatCreateSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;
  A->ops->transpose               = MatTranspose_IS;
  A->ops->getinfo                 = MatGetInfo_IS;
  A->ops->diagonalscale           = MatDiagonalScale_IS;
  A->ops->setfromoptions          = MatSetFromOptions_IS;
  A->ops->setup                   = MatSetUp_IS;
  A->ops->hasoperation            = MatHasOperation_IS;
  A->ops->getdiagonalblock        = MatGetDiagonalBlock_IS;
  A->ops->createsubmatrices       = MatCreateSubMatrices_IS;
  A->ops->increaseoverlap         = MatIncreaseOverlap_IS;
  A->ops->setblocksizes           = MatSetBlockSizes_IS;

  /* special MATIS functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", MatISSetLocalMatType_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", MatISGetLocalMat_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISRestoreLocalMat_C", MatISRestoreLocalMat_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", MatISSetLocalMat_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", MatISSetPreallocation_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetAllowRepeated_C", MatISSetAllowRepeated_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", MatISStoreL2L_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", MatISFixLocalEmpty_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", MatISGetLocalToGlobalMapping_IS));
  /* conversions to assembled formats all funnel through MatConvert_IS_XAIJ */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", MatSetPreallocationCOOLocal_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_IS));
  PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATIS));
  PetscFunctionReturn(PETSC_SUCCESS);
}