1 /* 2 This file defines an additive Schwarz preconditioner for any Mat implementation. 3 4 Note that each processor may have any number of subdomains. But in order to 5 deal easily with the VecScatter(), we treat each processor as if it has the 6 same number of subdomains. 7 8 n - total number of true subdomains on all processors 9 n_local_true - actual number of subdomains on this processor 10 n_local = maximum over all processors of n_local_true 11 */ 12 13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/ 14 #include <petsc/private/matimpl.h> 15 16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer) 17 { 18 PC_ASM *osm = (PC_ASM *)pc->data; 19 PetscMPIInt rank; 20 PetscInt i, bsz; 21 PetscBool iascii, isstring; 22 PetscViewer sviewer; 23 PetscViewerFormat format; 24 const char *prefix; 25 26 PetscFunctionBegin; 27 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 28 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring)); 29 if (iascii) { 30 char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set"; 31 if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap)); 32 if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n)); 33 PetscCall(PetscViewerASCIIPrintf(viewer, " %s, %s\n", blocks, overlaps)); 34 PetscCall(PetscViewerASCIIPrintf(viewer, " restriction/interpolation type - %s\n", PCASMTypes[osm->type])); 35 if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, " Additive Schwarz: using DM to define subdomains\n")); 36 if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype])); 37 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank)); 38 PetscCall(PetscViewerGetFormat(viewer, &format)); 39 if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) { 40 if (osm->ksp) { 41 PetscCall(PetscViewerASCIIPrintf(viewer, " Local solver information for first block is in the following KSP and PC objects on rank 0:\n")); 42 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 43 PetscCall(PetscViewerASCIIPrintf(viewer, " Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? 
prefix : "")); 44 PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 45 if (rank == 0) { 46 PetscCall(PetscViewerASCIIPushTab(sviewer)); 47 PetscCall(KSPView(osm->ksp[0], sviewer)); 48 PetscCall(PetscViewerASCIIPopTab(sviewer)); 49 } 50 PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 51 } 52 } else { 53 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 54 PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] number of local blocks = %" PetscInt_FMT "\n", rank, osm->n_local_true)); 55 PetscCall(PetscViewerFlush(viewer)); 56 PetscCall(PetscViewerASCIIPrintf(viewer, " Local solver information for each block is in the following KSP and PC objects:\n")); 57 PetscCall(PetscViewerASCIIPushTab(viewer)); 58 PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n")); 59 PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 60 for (i = 0; i < osm->n_local_true; i++) { 61 PetscCall(ISGetLocalSize(osm->is[i], &bsz)); 62 PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", rank, i, bsz)); 63 PetscCall(KSPView(osm->ksp[i], sviewer)); 64 PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n")); 65 } 66 PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 67 PetscCall(PetscViewerASCIIPopTab(viewer)); 68 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 69 } 70 } else if (isstring) { 71 PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type])); 72 PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 73 if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer)); 74 PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 75 } 76 PetscFunctionReturn(PETSC_SUCCESS); 77 } 78 79 static PetscErrorCode PCASMPrintSubdomains(PC pc) 80 { 81 PC_ASM *osm = (PC_ASM *)pc->data; 82 const char *prefix; 83 char fname[PETSC_MAX_PATH_LEN + 1]; 84 PetscViewer viewer, sviewer; 85 char *s; 86 PetscInt i, j, nidx; 87 const PetscInt *idx; 88 PetscMPIInt rank, size; 89 90 PetscFunctionBegin; 91 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 92 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank)); 93 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 94 PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL)); 95 if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname))); 96 PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer)); 97 for (i = 0; i < osm->n_local; i++) { 98 if (i < osm->n_local_true) { 99 PetscCall(ISGetLocalSize(osm->is[i], &nidx)); 100 PetscCall(ISGetIndices(osm->is[i], &idx)); 101 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 102 #define len 16 * (nidx + 1) + 512 103 PetscCall(PetscMalloc1(len, &s)); 104 PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer)); 105 #undef len 106 PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i)); 107 for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j])); 108 PetscCall(ISRestoreIndices(osm->is[i], &idx)); 109 PetscCall(PetscViewerStringSPrintf(sviewer, "\n")); 110 PetscCall(PetscViewerDestroy(&sviewer)); 111 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 
112 PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s)); 113 PetscCall(PetscViewerFlush(viewer)); 114 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 115 PetscCall(PetscFree(s)); 116 if (osm->is_local) { 117 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 118 #define len 16 * (nidx + 1) + 512 119 PetscCall(PetscMalloc1(len, &s)); 120 PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer)); 121 #undef len 122 PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i)); 123 PetscCall(ISGetLocalSize(osm->is_local[i], &nidx)); 124 PetscCall(ISGetIndices(osm->is_local[i], &idx)); 125 for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j])); 126 PetscCall(ISRestoreIndices(osm->is_local[i], &idx)); 127 PetscCall(PetscViewerStringSPrintf(sviewer, "\n")); 128 PetscCall(PetscViewerDestroy(&sviewer)); 129 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 130 PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s)); 131 PetscCall(PetscViewerFlush(viewer)); 132 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 133 PetscCall(PetscFree(s)); 134 } 135 } else { 136 /* Participate in collective viewer calls. */ 137 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 138 PetscCall(PetscViewerFlush(viewer)); 139 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 140 /* Assume either all ranks have is_local or none do. */ 141 if (osm->is_local) { 142 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 143 PetscCall(PetscViewerFlush(viewer)); 144 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 145 } 146 } 147 } 148 PetscCall(PetscViewerFlush(viewer)); 149 PetscCall(PetscViewerDestroy(&viewer)); 150 PetscFunctionReturn(PETSC_SUCCESS); 151 } 152 153 static PetscErrorCode PCSetUp_ASM(PC pc) 154 { 155 PC_ASM *osm = (PC_ASM *)pc->data; 156 PetscBool flg; 157 PetscInt i, m, m_local; 158 MatReuse scall = MAT_REUSE_MATRIX; 159 IS isl; 160 KSP ksp; 161 PC subpc; 162 const char *prefix, *pprefix; 163 Vec vec; 164 DM *domain_dm = NULL; 165 MatNullSpace *nullsp = NULL; 166 167 PetscFunctionBegin; 168 if (!pc->setupcalled) { 169 PetscInt m; 170 171 /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */ 172 if (osm->n_local_true == PETSC_DECIDE) { 173 /* no subdomains given */ 174 /* try pc->dm first, if allowed */ 175 if (osm->dm_subdomains && pc->dm) { 176 PetscInt num_domains, d; 177 char **domain_names; 178 IS *inner_domain_is, *outer_domain_is; 179 PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm)); 180 osm->overlap = -1; /* We do not want to increase the overlap of the IS. 
181 A future improvement of this code might allow one to use 182 DM-defined subdomains and also increase the overlap, 183 but that is not currently supported */ 184 if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is)); 185 for (d = 0; d < num_domains; ++d) { 186 if (domain_names) PetscCall(PetscFree(domain_names[d])); 187 if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d])); 188 if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d])); 189 } 190 PetscCall(PetscFree(domain_names)); 191 PetscCall(PetscFree(inner_domain_is)); 192 PetscCall(PetscFree(outer_domain_is)); 193 } 194 if (osm->n_local_true == PETSC_DECIDE) { 195 /* still no subdomains; use one subdomain per processor */ 196 osm->n_local_true = 1; 197 } 198 } 199 { /* determine the global and max number of subdomains */ 200 struct { 201 PetscInt max, sum; 202 } inwork, outwork; 203 PetscMPIInt size; 204 205 inwork.max = osm->n_local_true; 206 inwork.sum = osm->n_local_true; 207 PetscCallMPI(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc))); 208 osm->n_local = outwork.max; 209 osm->n = outwork.sum; 210 211 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 212 if (outwork.max == 1 && outwork.sum == size) { 213 /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */ 214 PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE)); 215 } 216 } 217 if (!osm->is) { /* create the index sets */ 218 PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is)); 219 } 220 if (osm->n_local_true > 1 && !osm->is_local) { 221 PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local)); 222 for (i = 0; i < osm->n_local_true; i++) { 223 if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */ 224 PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i])); 225 PetscCall(ISCopy(osm->is[i], osm->is_local[i])); 226 } else { 227 PetscCall(PetscObjectReference((PetscObject)osm->is[i])); 228 osm->is_local[i] = osm->is[i]; 229 } 230 } 231 } 232 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 233 if (osm->overlap > 0) { 234 /* Extend the "overlapping" regions by a number of steps */ 235 PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap)); 236 } 237 if (osm->sort_indices) { 238 for (i = 0; i < osm->n_local_true; i++) { 239 PetscCall(ISSort(osm->is[i])); 240 if (osm->is_local) PetscCall(ISSort(osm->is_local[i])); 241 } 242 } 243 flg = PETSC_FALSE; 244 PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg)); 245 if (flg) PetscCall(PCASMPrintSubdomains(pc)); 246 if (!osm->ksp) { 247 /* Create the local solvers */ 248 PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp)); 249 if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n")); 250 for (i = 0; i < osm->n_local_true; i++) { 251 PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp)); 252 PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel)); 253 PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure)); 254 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1)); 255 PetscCall(KSPSetType(ksp, KSPPREONLY)); 256 PetscCall(KSPGetPC(ksp, &subpc)); 257 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 258 PetscCall(KSPSetOptionsPrefix(ksp, prefix)); 259 PetscCall(KSPAppendOptionsPrefix(ksp, "sub_")); 260 if (domain_dm) { 261 PetscCall(KSPSetDM(ksp, domain_dm[i])); 262 
PetscCall(KSPSetDMActive(ksp, PETSC_FALSE)); 263 PetscCall(DMDestroy(&domain_dm[i])); 264 } 265 osm->ksp[i] = ksp; 266 } 267 if (domain_dm) PetscCall(PetscFree(domain_dm)); 268 } 269 270 PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis)); 271 PetscCall(ISSortRemoveDups(osm->lis)); 272 PetscCall(ISGetLocalSize(osm->lis, &m)); 273 274 scall = MAT_INITIAL_MATRIX; 275 } else { 276 /* 277 Destroy the blocks from the previous iteration 278 */ 279 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 280 PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp)); 281 PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat)); 282 scall = MAT_INITIAL_MATRIX; 283 } 284 } 285 286 /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */ 287 if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) { 288 PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp)); 289 if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat)); 290 scall = MAT_INITIAL_MATRIX; 291 } 292 293 /* 294 Extract out the submatrices 295 */ 296 PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat)); 297 if (scall == MAT_INITIAL_MATRIX) { 298 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix)); 299 for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix)); 300 if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp)); 301 } 302 303 /* Convert the types of the submatrices (if needbe) */ 304 if (osm->sub_mat_type) { 305 for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i])); 306 } 307 308 if (!pc->setupcalled) { 309 VecType vtype; 310 311 /* Create the local work vectors (from the local matrices) and scatter contexts */ 312 PetscCall(MatCreateVecs(pc->pmat, &vec, NULL)); 313 314 PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain"); 315 if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation)); 316 PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction)); 317 PetscCall(PetscMalloc1(osm->n_local_true, &osm->x)); 318 PetscCall(PetscMalloc1(osm->n_local_true, &osm->y)); 319 320 PetscCall(ISGetLocalSize(osm->lis, &m)); 321 PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl)); 322 PetscCall(MatGetVecType(osm->pmat[0], &vtype)); 323 PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx)); 324 PetscCall(VecSetSizes(osm->lx, m, m)); 325 PetscCall(VecSetType(osm->lx, vtype)); 326 PetscCall(VecDuplicate(osm->lx, &osm->ly)); 327 PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction)); 328 PetscCall(ISDestroy(&isl)); 329 330 for (i = 0; i < osm->n_local_true; ++i) { 331 ISLocalToGlobalMapping ltog; 332 IS isll; 333 const PetscInt *idx_is; 334 PetscInt *idx_lis, nout; 335 336 PetscCall(ISGetLocalSize(osm->is[i], &m)); 337 PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL)); 338 PetscCall(VecDuplicate(osm->x[i], &osm->y[i])); 339 340 /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */ 341 
PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog)); 342 PetscCall(ISGetLocalSize(osm->is[i], &m)); 343 PetscCall(ISGetIndices(osm->is[i], &idx_is)); 344 PetscCall(PetscMalloc1(m, &idx_lis)); 345 PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis)); 346 PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis"); 347 PetscCall(ISRestoreIndices(osm->is[i], &idx_is)); 348 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll)); 349 PetscCall(ISLocalToGlobalMappingDestroy(&ltog)); 350 PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl)); 351 PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i])); 352 PetscCall(ISDestroy(&isll)); 353 PetscCall(ISDestroy(&isl)); 354 if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */ 355 ISLocalToGlobalMapping ltog; 356 IS isll, isll_local; 357 const PetscInt *idx_local; 358 PetscInt *idx1, *idx2, nout; 359 360 PetscCall(ISGetLocalSize(osm->is_local[i], &m_local)); 361 PetscCall(ISGetIndices(osm->is_local[i], &idx_local)); 362 363 PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog)); 364 PetscCall(PetscMalloc1(m_local, &idx1)); 365 PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1)); 366 PetscCall(ISLocalToGlobalMappingDestroy(&ltog)); 367 PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is"); 368 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll)); 369 370 PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog)); 371 PetscCall(PetscMalloc1(m_local, &idx2)); 372 PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2)); 373 PetscCall(ISLocalToGlobalMappingDestroy(&ltog)); 374 PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis"); 375 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local)); 376 377 PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local)); 378 PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i])); 379 380 PetscCall(ISDestroy(&isll)); 381 PetscCall(ISDestroy(&isll_local)); 382 } 383 } 384 PetscCall(VecDestroy(&vec)); 385 } 386 387 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 388 IS *cis; 389 PetscInt c; 390 391 PetscCall(PetscMalloc1(osm->n_local_true, &cis)); 392 for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis; 393 PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats)); 394 PetscCall(PetscFree(cis)); 395 } 396 397 /* Return control to the user so that the submatrices can be modified (e.g., to apply 398 different boundary conditions for the submatrices than for the global problem) */ 399 PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP)); 400 401 /* 402 Loop over subdomains putting them into local ksp 403 */ 404 PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix)); 405 for (i = 0; i < osm->n_local_true; i++) { 406 PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i])); 407 PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix)); 408 if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i])); 409 } 410 PetscFunctionReturn(PETSC_SUCCESS); 411 } 412 413 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc) 414 { 415 PC_ASM *osm = (PC_ASM *)pc->data; 416 PetscInt i; 417 KSPConvergedReason reason;
418 419 PetscFunctionBegin; 420 for (i = 0; i < osm->n_local_true; i++) { 421 PetscCall(KSPSetUp(osm->ksp[i])); 422 PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason)); 423 if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 424 } 425 PetscFunctionReturn(PETSC_SUCCESS); 426 } 427 428 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y) 429 { 430 PC_ASM *osm = (PC_ASM *)pc->data; 431 PetscInt i, n_local_true = osm->n_local_true; 432 ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE; 433 434 PetscFunctionBegin; 435 /* 436 support for limiting the restriction or interpolation to only local 437 subdomain values (leaving the other values 0). 438 */ 439 if (!(osm->type & PC_ASM_RESTRICT)) { 440 forward = SCATTER_FORWARD_LOCAL; 441 /* have to zero the work RHS since scatter may leave some slots empty */ 442 PetscCall(VecSet(osm->lx, 0.0)); 443 } 444 if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL; 445 446 PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]); 447 /* zero the global and the local solutions */ 448 PetscCall(VecSet(y, 0.0)); 449 PetscCall(VecSet(osm->ly, 0.0)); 450 451 /* copy the global RHS to local RHS including the ghost nodes */ 452 PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 453 PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 454 455 /* restrict local RHS to the overlapping 0-block RHS */ 456 PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 457 PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 458 459 /* do the local solves */ 460 for (i = 0; i < n_local_true; ++i) { 461 /* solve the overlapping i-block */ 462 PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 463 PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i])); 464 PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i])); 465 PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 466 467 if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */ 468 PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 469 PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 470 } else { /* interpolate the overlapping i-block solution to the local solution */ 471 PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 472 PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 473 } 474 475 if (i < n_local_true - 1) { 476 /* restrict local RHS to the overlapping (i+1)-block RHS */ 477 PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 478 PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 479 480 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 481 /* update the overlapping (i+1)-block RHS using the current local solution */ 482 PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1])); 483 PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1])); 484 } 485 } 486 } 487 /* add the local solution to the global solution including the ghost 
nodes */ 488 PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 489 PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 490 PetscFunctionReturn(PETSC_SUCCESS); 491 } 492 493 static PetscErrorCode PCMatApply_ASM_Private(PC pc, Mat X, Mat Y, PetscBool transpose) 494 { 495 PC_ASM *osm = (PC_ASM *)pc->data; 496 Mat Z, W; 497 Vec x; 498 PetscInt i, m, N; 499 ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE; 500 501 PetscFunctionBegin; 502 PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented"); 503 /* 504 support for limiting the restriction or interpolation to only local 505 subdomain values (leaving the other values 0). 506 */ 507 if ((!transpose && !(osm->type & PC_ASM_RESTRICT)) || (transpose && !(osm->type & PC_ASM_INTERPOLATE))) { 508 forward = SCATTER_FORWARD_LOCAL; 509 /* have to zero the work RHS since scatter may leave some slots empty */ 510 PetscCall(VecSet(osm->lx, 0.0)); 511 } 512 if ((!transpose && !(osm->type & PC_ASM_INTERPOLATE)) || (transpose && !(osm->type & PC_ASM_RESTRICT))) reverse = SCATTER_REVERSE_LOCAL; 513 PetscCall(VecGetLocalSize(osm->x[0], &m)); 514 PetscCall(MatGetSize(X, NULL, &N)); 515 PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z)); 516 517 PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]); 518 /* zero the global and the local solutions */ 519 PetscCall(MatZeroEntries(Y)); 520 PetscCall(VecSet(osm->ly, 0.0)); 521 522 for (i = 0; i < N; ++i) { 523 PetscCall(MatDenseGetColumnVecRead(X, i, &x)); 524 /* copy the global RHS to local RHS including the ghost nodes */ 525 PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 526 PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 527 PetscCall(MatDenseRestoreColumnVecRead(X, i, &x)); 528 529 PetscCall(MatDenseGetColumnVecWrite(Z, i, &x)); 530 /* restrict local RHS to the overlapping 0-block RHS */ 531 PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward)); 532 PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward)); 533 PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x)); 534 } 535 PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W)); 536 /* solve the overlapping 0-block */ 537 if (!transpose) { 538 PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0)); 539 PetscCall(KSPMatSolve(osm->ksp[0], Z, W)); 540 PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0)); 541 } else { 542 PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0)); 543 PetscCall(KSPMatSolveTranspose(osm->ksp[0], Z, W)); 544 PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0)); 545 } 546 PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL)); 547 PetscCall(MatDestroy(&Z)); 548 549 for (i = 0; i < N; ++i) { 550 PetscCall(VecSet(osm->ly, 0.0)); 551 PetscCall(MatDenseGetColumnVecRead(W, i, &x)); 552 if (osm->lprolongation && ((!transpose && osm->type != PC_ASM_INTERPOLATE) || (transpose && osm->type != PC_ASM_RESTRICT))) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */ 553 PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward)); 554 PetscCall(VecScatterEnd(osm->lprolongation[0], x, 
osm->ly, ADD_VALUES, forward)); 555 } else { /* interpolate the overlapping 0-block solution to the local solution */ 556 PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse)); 557 PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse)); 558 } 559 PetscCall(MatDenseRestoreColumnVecRead(W, i, &x)); 560 561 PetscCall(MatDenseGetColumnVecWrite(Y, i, &x)); 562 /* add the local solution to the global solution including the ghost nodes */ 563 PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse)); 564 PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse)); 565 PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x)); 566 } 567 PetscCall(MatDestroy(&W)); 568 PetscFunctionReturn(PETSC_SUCCESS); 569 } 570 571 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y) 572 { 573 PetscFunctionBegin; 574 PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_FALSE)); 575 PetscFunctionReturn(PETSC_SUCCESS); 576 } 577 578 static PetscErrorCode PCMatApplyTranspose_ASM(PC pc, Mat X, Mat Y) 579 { 580 PetscFunctionBegin; 581 PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_TRUE)); 582 PetscFunctionReturn(PETSC_SUCCESS); 583 } 584 585 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y) 586 { 587 PC_ASM *osm = (PC_ASM *)pc->data; 588 PetscInt i, n_local_true = osm->n_local_true; 589 ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE; 590 591 PetscFunctionBegin; 592 PetscCheck(osm->n_local_true <= 1 || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented"); 593 /* 594 Support for limiting the restriction or interpolation to only local 595 subdomain values (leaving the other values 0). 596 597 Note: these are reversed from the PCApply_ASM() because we are applying the 598 transpose of the three terms 599 */ 600 601 if (!(osm->type & PC_ASM_INTERPOLATE)) { 602 forward = SCATTER_FORWARD_LOCAL; 603 /* have to zero the work RHS since scatter may leave some slots empty */ 604 PetscCall(VecSet(osm->lx, 0.0)); 605 } 606 if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL; 607 608 /* zero the global and the local solutions */ 609 PetscCall(VecSet(y, 0.0)); 610 PetscCall(VecSet(osm->ly, 0.0)); 611 612 /* Copy the global RHS to local RHS including the ghost nodes */ 613 PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 614 PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 615 616 /* Restrict local RHS to the overlapping 0-block RHS */ 617 PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 618 PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 619 620 /* do the local solves */ 621 for (i = 0; i < n_local_true; ++i) { 622 /* solve the overlapping i-block */ 623 PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 624 PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i])); 625 PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i])); 626 PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 627 628 if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */ 629 PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 630 PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 
631 } else { /* interpolate the overlapping i-block solution to the local solution */ 632 PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 633 PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 634 } 635 636 if (i < n_local_true - 1) { 637 /* Restrict local RHS to the overlapping (i+1)-block RHS */ 638 PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 639 PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 640 } 641 } 642 /* Add the local solution to the global solution including the ghost nodes */ 643 PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 644 PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 645 PetscFunctionReturn(PETSC_SUCCESS); 646 } 647 648 static PetscErrorCode PCReset_ASM(PC pc) 649 { 650 PC_ASM *osm = (PC_ASM *)pc->data; 651 PetscInt i; 652 653 PetscFunctionBegin; 654 if (osm->ksp) { 655 for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i])); 656 } 657 if (osm->pmat) { 658 if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat)); 659 } 660 if (osm->lrestriction) { 661 PetscCall(VecScatterDestroy(&osm->restriction)); 662 for (i = 0; i < osm->n_local_true; i++) { 663 PetscCall(VecScatterDestroy(&osm->lrestriction[i])); 664 if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i])); 665 PetscCall(VecDestroy(&osm->x[i])); 666 PetscCall(VecDestroy(&osm->y[i])); 667 } 668 PetscCall(PetscFree(osm->lrestriction)); 669 if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation)); 670 PetscCall(PetscFree(osm->x)); 671 PetscCall(PetscFree(osm->y)); 672 } 673 PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local)); 674 PetscCall(ISDestroy(&osm->lis)); 675 PetscCall(VecDestroy(&osm->lx)); 676 PetscCall(VecDestroy(&osm->ly)); 677 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats)); 678 679 PetscCall(PetscFree(osm->sub_mat_type)); 680 681 osm->is = NULL; 682 osm->is_local = NULL; 683 PetscFunctionReturn(PETSC_SUCCESS); 684 } 685 686 static PetscErrorCode PCDestroy_ASM(PC pc) 687 { 688 PC_ASM *osm = (PC_ASM *)pc->data; 689 PetscInt i; 690 691 PetscFunctionBegin; 692 PetscCall(PCReset_ASM(pc)); 693 if (osm->ksp) { 694 for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i])); 695 PetscCall(PetscFree(osm->ksp)); 696 } 697 PetscCall(PetscFree(pc->data)); 698 699 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL)); 700 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL)); 701 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL)); 702 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL)); 703 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL)); 704 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL)); 705 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL)); 706 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL)); 707 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL)); 708 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL)); 709 
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL)); 710 PetscFunctionReturn(PETSC_SUCCESS); 711 } 712 713 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems PetscOptionsObject) 714 { 715 PC_ASM *osm = (PC_ASM *)pc->data; 716 PetscInt blocks, ovl; 717 PetscBool flg; 718 PCASMType asmtype; 719 PCCompositeType loctype; 720 char sub_mat_type[256]; 721 722 PetscFunctionBegin; 723 PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options"); 724 PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg)); 725 PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg)); 726 if (flg) { 727 PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL)); 728 osm->dm_subdomains = PETSC_FALSE; 729 } 730 PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg)); 731 if (flg) { 732 PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL)); 733 osm->dm_subdomains = PETSC_FALSE; 734 } 735 PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg)); 736 if (flg) { 737 PetscCall(PCASMSetOverlap(pc, ovl)); 738 osm->dm_subdomains = PETSC_FALSE; 739 } 740 flg = PETSC_FALSE; 741 PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg)); 742 if (flg) PetscCall(PCASMSetType(pc, asmtype)); 743 flg = PETSC_FALSE; 744 PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg)); 745 if (flg) PetscCall(PCASMSetLocalType(pc, loctype)); 746 PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg)); 747 if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type)); 748 PetscOptionsHeadEnd(); 749 PetscFunctionReturn(PETSC_SUCCESS); 750 } 751 752 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[]) 753 { 754 PC_ASM *osm = (PC_ASM *)pc->data; 755 PetscInt i; 756 757 PetscFunctionBegin; 758 PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n); 759 PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp()."); 760 761 if (!pc->setupcalled) { 762 if (is) { 763 for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i])); 764 } 765 if (is_local) { 766 for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i])); 767 } 768 PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local)); 769 770 if (osm->ksp && osm->n_local_true != n) { 771 for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i])); 772 PetscCall(PetscFree(osm->ksp)); 773 } 774 775 osm->n_local_true = n; 776 osm->is = NULL; 777 osm->is_local = NULL; 778 if (is) { 779 PetscCall(PetscMalloc1(n, &osm->is)); 780 for (i = 0; i < n; i++) osm->is[i] = is[i]; 781 /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. 
*/ 782 osm->overlap = -1; 783 } 784 if (is_local) { 785 PetscCall(PetscMalloc1(n, &osm->is_local)); 786 for (i = 0; i < n; i++) osm->is_local[i] = is_local[i]; 787 if (!is) { 788 PetscCall(PetscMalloc1(osm->n_local_true, &osm->is)); 789 for (i = 0; i < osm->n_local_true; i++) { 790 if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */ 791 PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i])); 792 PetscCall(ISCopy(osm->is_local[i], osm->is[i])); 793 } else { 794 PetscCall(PetscObjectReference((PetscObject)osm->is_local[i])); 795 osm->is[i] = osm->is_local[i]; 796 } 797 } 798 } 799 } 800 } 801 PetscFunctionReturn(PETSC_SUCCESS); 802 } 803 804 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local) 805 { 806 PC_ASM *osm = (PC_ASM *)pc->data; 807 PetscMPIInt rank, size; 808 PetscInt n; 809 810 PetscFunctionBegin; 811 PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N); 812 PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets, they cannot be set globally yet."); 813 814 /* 815 Split the subdomains equally among all processors 816 */ 817 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank)); 818 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 819 n = N / size + ((N % size) > rank); 820 PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, rank, size, N); 821 PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp()."); 822 if (!pc->setupcalled) { 823 PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local)); 824 825 osm->n_local_true = n; 826 osm->is = NULL; 827 osm->is_local = NULL; 828 } 829 PetscFunctionReturn(PETSC_SUCCESS); 830 } 831 832 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl) 833 { 834 PC_ASM *osm = (PC_ASM *)pc->data; 835 836 PetscFunctionBegin; 837 PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested"); 838 PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp()."); 839 if (!pc->setupcalled) osm->overlap = ovl; 840 PetscFunctionReturn(PETSC_SUCCESS); 841 } 842 843 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type) 844 { 845 PC_ASM *osm = (PC_ASM *)pc->data; 846 847 PetscFunctionBegin; 848 osm->type = type; 849 osm->type_set = PETSC_TRUE; 850 PetscFunctionReturn(PETSC_SUCCESS); 851 } 852 853 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type) 854 { 855 PC_ASM *osm = (PC_ASM *)pc->data; 856 857 PetscFunctionBegin; 858 *type = osm->type; 859 PetscFunctionReturn(PETSC_SUCCESS); 860 } 861 862 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type) 863 { 864 PC_ASM *osm = (PC_ASM *)pc->data; 865 866 PetscFunctionBegin; 867 PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type"); 868 osm->loctype = type; 869 PetscFunctionReturn(PETSC_SUCCESS); 870 } 871 872 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type) 873 { 874 PC_ASM *osm = 
(PC_ASM *)pc->data; 875 876 PetscFunctionBegin; 877 *type = osm->loctype; 878 PetscFunctionReturn(PETSC_SUCCESS); 879 } 880 881 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort) 882 { 883 PC_ASM *osm = (PC_ASM *)pc->data; 884 885 PetscFunctionBegin; 886 osm->sort_indices = doSort; 887 PetscFunctionReturn(PETSC_SUCCESS); 888 } 889 890 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp) 891 { 892 PC_ASM *osm = (PC_ASM *)pc->data; 893 894 PetscFunctionBegin; 895 PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here"); 896 897 if (n_local) *n_local = osm->n_local_true; 898 if (first_local) { 899 PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc))); 900 *first_local -= osm->n_local_true; 901 } 902 if (ksp) *ksp = osm->ksp; 903 PetscFunctionReturn(PETSC_SUCCESS); 904 } 905 906 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type) 907 { 908 PC_ASM *osm = (PC_ASM *)pc->data; 909 910 PetscFunctionBegin; 911 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 912 PetscAssertPointer(sub_mat_type, 2); 913 *sub_mat_type = osm->sub_mat_type; 914 PetscFunctionReturn(PETSC_SUCCESS); 915 } 916 917 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type) 918 { 919 PC_ASM *osm = (PC_ASM *)pc->data; 920 921 PetscFunctionBegin; 922 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 923 PetscCall(PetscFree(osm->sub_mat_type)); 924 PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type)); 925 PetscFunctionReturn(PETSC_SUCCESS); 926 } 927 928 /*@ 929 PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`. 930 931 Collective 932 933 Input Parameters: 934 + pc - the preconditioner context 935 . n - the number of subdomains for this processor (default value = 1) 936 . is - the index set that defines the subdomains for this processor (or `NULL` for PETSc to determine subdomains) 937 the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call 938 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT` 939 (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array 940 (not the `IS` in the array) after this call 941 942 Options Database Key: 943 . -pc_asm_local_blocks <blks> - Sets number of local blocks 944 945 Level: advanced 946 947 Notes: 948 The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local` 949 950 By default the `PCASM` preconditioner uses 1 block per processor. 951 952 Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors. 953 954 If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated 955 back to form the global solution (this is the standard restricted additive Schwarz method, RASM) 956 957 If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is 958 no code to handle that case. 
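  A minimal usage sketch (illustrative only, not part of this implementation); it assumes a `PCASM` preconditioner context `pc` and two hypothetical index sets `myis[0]` and `myis[1]` that the caller has built for this process in the global numbering, carrying whatever overlap is desired
.vb
  IS myis[2];
  /* ... create myis[0] and myis[1] for this process, e.g. with ISCreateGeneral() ... */
  PetscCall(PCASMSetLocalSubdomains(pc, 2, myis, NULL)); /* NULL: no separate non-overlapping is_local sets are supplied */
.ve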
959 960 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`, 961 `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM` 962 @*/ 963 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[]) 964 { 965 PetscFunctionBegin; 966 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 967 PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local)); 968 PetscFunctionReturn(PETSC_SUCCESS); 969 } 970 971 /*@ 972 PCASMSetTotalSubdomains - Sets the subdomains for all processors for the 973 additive Schwarz preconditioner, `PCASM`. 974 975 Collective, all MPI ranks must pass in the same array of `IS` 976 977 Input Parameters: 978 + pc - the preconditioner context 979 . N - the number of subdomains for all processors 980 . is - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains) 981 the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call 982 - is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information) 983 The values of the `is_local` array are copied so you can free the array (not the `IS` in the array) after this call 984 985 Options Database Key: 986 . -pc_asm_blocks <blks> - Sets total blocks 987 988 Level: advanced 989 990 Notes: 991 Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`. 992 993 By default the `PCASM` preconditioner uses 1 block per processor. 994 995 These index sets cannot be destroyed until after completion of the 996 linear solves for which the `PCASM` preconditioner is being used. 997 998 Use `PCASMSetLocalSubdomains()` to set local subdomains. 999 1000 The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local` 1001 1002 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`, 1003 `PCASMCreateSubdomains2D()`, `PCGASM` 1004 @*/ 1005 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[]) 1006 { 1007 PetscFunctionBegin; 1008 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1009 PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local)); 1010 PetscFunctionReturn(PETSC_SUCCESS); 1011 } 1012 1013 /*@ 1014 PCASMSetOverlap - Sets the overlap between a pair of subdomains for the 1015 additive Schwarz preconditioner, `PCASM`. 1016 1017 Logically Collective 1018 1019 Input Parameters: 1020 + pc - the preconditioner context 1021 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1) 1022 1023 Options Database Key: 1024 . -pc_asm_overlap <ovl> - Sets overlap 1025 1026 Level: intermediate 1027 1028 Notes: 1029 By default the `PCASM` preconditioner uses 1 block per processor. To use 1030 multiple blocks per processor, see `PCASMSetTotalSubdomains()` and 1031 `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>). 1032 1033 The overlap defaults to 1, so if one desires that no additional 1034 overlap be computed beyond what may have been set with a call to 1035 `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl 1036 must be set to 0.
In particular, if one does not explicitly set 1037 the subdomains in an application code, then all overlap would be computed 1038 internally by PETSc, and using an overlap of 0 would result in a `PCASM` 1039 variant that is equivalent to the block Jacobi preconditioner. 1040 1041 The default algorithm used by PETSc to increase overlap is fast but not scalable; 1042 use the option -mat_increase_overlap_scalable when the problem and the number of processes are large. 1043 1044 One can define initial index sets with any overlap via 1045 `PCASMSetLocalSubdomains()`; the routine 1046 `PCASMSetOverlap()` merely allows PETSc to extend that overlap further 1047 if desired. 1048 1049 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`, 1050 `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM` 1051 @*/ 1052 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl) 1053 { 1054 PetscFunctionBegin; 1055 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1056 PetscValidLogicalCollectiveInt(pc, ovl, 2); 1057 PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl)); 1058 PetscFunctionReturn(PETSC_SUCCESS); 1059 } 1060 1061 /*@ 1062 PCASMSetType - Sets the type of restriction and interpolation used 1063 for local problems in the additive Schwarz method, `PCASM`. 1064 1065 Logically Collective 1066 1067 Input Parameters: 1068 + pc - the preconditioner context 1069 - type - variant of `PCASM`, one of 1070 .vb 1071 PC_ASM_BASIC - full interpolation and restriction 1072 PC_ASM_RESTRICT - full restriction, local processor interpolation (default) 1073 PC_ASM_INTERPOLATE - full interpolation, local processor restriction 1074 PC_ASM_NONE - local processor restriction and interpolation 1075 .ve 1076 1077 Options Database Key: 1078 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType` 1079 1080 Level: intermediate 1081 1082 Note: 1083 If the `is_local` arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected 1084 to limit the local processor interpolation 1085 1086 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, 1087 `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM` 1088 @*/ 1089 PetscErrorCode PCASMSetType(PC pc, PCASMType type) 1090 { 1091 PetscFunctionBegin; 1092 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1093 PetscValidLogicalCollectiveEnum(pc, type, 2); 1094 PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type)); 1095 PetscFunctionReturn(PETSC_SUCCESS); 1096 } 1097 1098 /*@ 1099 PCASMGetType - Gets the type of restriction and interpolation used 1100 for local problems in the additive Schwarz method, `PCASM`. 1101 1102 Logically Collective 1103 1104 Input Parameter: 1105 . pc - the preconditioner context 1106 1107 Output Parameter: 1108 . type - variant of `PCASM`, one of 1109 .vb 1110 PC_ASM_BASIC - full interpolation and restriction 1111 PC_ASM_RESTRICT - full restriction, local processor interpolation 1112 PC_ASM_INTERPOLATE - full interpolation, local processor restriction 1113 PC_ASM_NONE - local processor restriction and interpolation 1114 .ve 1115 1116 Options Database Key: 1117 . 
-pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type 1118 1119 Level: intermediate 1120 1121 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`, 1122 `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()` 1123 @*/ 1124 PetscErrorCode PCASMGetType(PC pc, PCASMType *type) 1125 { 1126 PetscFunctionBegin; 1127 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1128 PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type)); 1129 PetscFunctionReturn(PETSC_SUCCESS); 1130 } 1131 1132 /*@ 1133 PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`. 1134 1135 Logically Collective 1136 1137 Input Parameters: 1138 + pc - the preconditioner context 1139 - type - type of composition, one of 1140 .vb 1141 PC_COMPOSITE_ADDITIVE - local additive combination 1142 PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination 1143 .ve 1144 1145 Options Database Key: 1146 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type 1147 1148 Level: intermediate 1149 1150 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType` 1151 @*/ 1152 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type) 1153 { 1154 PetscFunctionBegin; 1155 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1156 PetscValidLogicalCollectiveEnum(pc, type, 2); 1157 PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type)); 1158 PetscFunctionReturn(PETSC_SUCCESS); 1159 } 1160 1161 /*@ 1162 PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`. 1163 1164 Logically Collective 1165 1166 Input Parameter: 1167 . pc - the preconditioner context 1168 1169 Output Parameter: 1170 . type - type of composition, one of 1171 .vb 1172 PC_COMPOSITE_ADDITIVE - local additive combination 1173 PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination 1174 .ve 1175 1176 Options Database Key: 1177 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type 1178 1179 Level: intermediate 1180 1181 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMType`, `PCCompositeType` 1182 @*/ 1183 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type) 1184 { 1185 PetscFunctionBegin; 1186 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1187 PetscAssertPointer(type, 2); 1188 PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type)); 1189 PetscFunctionReturn(PETSC_SUCCESS); 1190 } 1191 1192 /*@ 1193 PCASMSetSortIndices - Determines whether subdomain indices are sorted. 1194 1195 Logically Collective 1196 1197 Input Parameters: 1198 + pc - the preconditioner context 1199 - doSort - sort the subdomain indices 1200 1201 Level: intermediate 1202 1203 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, 1204 `PCASMCreateSubdomains2D()` 1205 @*/ 1206 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort) 1207 { 1208 PetscFunctionBegin; 1209 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1210 PetscValidLogicalCollectiveBool(pc, doSort, 2); 1211 PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort)); 1212 PetscFunctionReturn(PETSC_SUCCESS); 1213 } 1214 1215 /*@C 1216 PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on 1217 this processor. 
1218 1219 Collective iff first_local is requested 1220 1221 Input Parameter: 1222 . pc - the preconditioner context 1223 1224 Output Parameters: 1225 + n_local - the number of blocks on this processor or `NULL` 1226 . first_local - the global number of the first block on this processor or `NULL`, all processors must request or all must pass `NULL` 1227 - ksp - the array of `KSP` contexts 1228 1229 Level: advanced 1230 1231 Notes: 1232 After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed. 1233 1234 You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`. 1235 1236 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, 1237 `PCASMCreateSubdomains2D()`, 1238 @*/ 1239 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[]) 1240 { 1241 PetscFunctionBegin; 1242 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1243 PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp)); 1244 PetscFunctionReturn(PETSC_SUCCESS); 1245 } 1246 1247 /*MC 1248 PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with 1249 its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg` 1250 1251 Options Database Keys: 1252 + -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI process. 1253 . -pc_asm_overlap <ovl> - Sets overlap 1254 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()` 1255 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()` 1256 - -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()` 1257 1258 Level: beginner 1259 1260 Notes: 1261 If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you 1262 will get a different convergence rate due to the default option of `-pc_asm_type restrict`. Use 1263 `-pc_asm_type basic` to get the same convergence behavior 1264 1265 Each processor can have one or more blocks, but a block cannot be shared by more 1266 than one processor. Use `PCGASM` for subdomains shared by multiple processes. 1267 1268 To set options on the solvers for each block append `-sub_` to all the `KSP`, and `PC` 1269 options database keys. 
For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly` 1270 1271 To set the options on the solvers separate for each block call `PCASMGetSubKSP()` 1272 and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`) 1273 1274 If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains 1275 1276 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`, 1277 `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()` 1278 `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType` 1279 M*/ 1280 1281 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc) 1282 { 1283 PC_ASM *osm; 1284 1285 PetscFunctionBegin; 1286 PetscCall(PetscNew(&osm)); 1287 1288 osm->n = PETSC_DECIDE; 1289 osm->n_local = 0; 1290 osm->n_local_true = PETSC_DECIDE; 1291 osm->overlap = 1; 1292 osm->ksp = NULL; 1293 osm->restriction = NULL; 1294 osm->lprolongation = NULL; 1295 osm->lrestriction = NULL; 1296 osm->x = NULL; 1297 osm->y = NULL; 1298 osm->is = NULL; 1299 osm->is_local = NULL; 1300 osm->mat = NULL; 1301 osm->pmat = NULL; 1302 osm->type = PC_ASM_RESTRICT; 1303 osm->loctype = PC_COMPOSITE_ADDITIVE; 1304 osm->sort_indices = PETSC_TRUE; 1305 osm->dm_subdomains = PETSC_FALSE; 1306 osm->sub_mat_type = NULL; 1307 1308 pc->data = (void *)osm; 1309 pc->ops->apply = PCApply_ASM; 1310 pc->ops->matapply = PCMatApply_ASM; 1311 pc->ops->applytranspose = PCApplyTranspose_ASM; 1312 pc->ops->matapplytranspose = PCMatApplyTranspose_ASM; 1313 pc->ops->setup = PCSetUp_ASM; 1314 pc->ops->reset = PCReset_ASM; 1315 pc->ops->destroy = PCDestroy_ASM; 1316 pc->ops->setfromoptions = PCSetFromOptions_ASM; 1317 pc->ops->setuponblocks = PCSetUpOnBlocks_ASM; 1318 pc->ops->view = PCView_ASM; 1319 pc->ops->applyrichardson = NULL; 1320 1321 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM)); 1322 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM)); 1323 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM)); 1324 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM)); 1325 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM)); 1326 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM)); 1327 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM)); 1328 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM)); 1329 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM)); 1330 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM)); 1331 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM)); 1332 PetscFunctionReturn(PETSC_SUCCESS); 1333 } 1334 1335 /*@C 1336 PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz 1337 preconditioner, `PCASM`, for any problem on a general grid. 1338 1339 Collective 1340 1341 Input Parameters: 1342 + A - The global matrix operator 1343 - n - the number of local blocks 1344 1345 Output Parameter: 1346 . 
.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
@*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscAssertPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
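        /* The CSR adjacency is built in two passes: the first loop counts the off-diagonal nonzeros per row
           so that iia/jja can be sized exactly, and the second loop fills jja with the off-diagonal column
           indices while accumulating the row offsets in iia. */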
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs;
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb, &numbering));
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCASMDestroySubdomains - Destroys the index sets created with
  `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.

  Collective

  Input Parameters:
+ n        - the number of index sets
. is       - the array of index sets
- is_local - the array of local index sets, can be `NULL`

  Level: advanced

  Developer Note:
  The `IS` arguments should be a *[]

.seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
@*/
PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS *is[], IS *is_local[])
{
  PetscInt i;

  PetscFunctionBegin;
  if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
  if (*is) {
    PetscAssertPointer(*is, 2);
    for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is)[i]));
    PetscCall(PetscFree(*is));
  }
  if (is_local && *is_local) {
    PetscAssertPointer(*is_local, 3);
    for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is_local)[i]));
    PetscCall(PetscFree(*is_local));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
  preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

  Not Collective

  Input Parameters:
+ m       - the number of mesh points in the x direction
. n       - the number of mesh points in the y direction
. M       - the number of subdomains in the x direction
. N       - the number of subdomains in the y direction
. dof     - degrees of freedom per node
- overlap - overlap in mesh lines

  Output Parameters:
+ Nsub     - the number of subdomains created
. is       - array of index sets defining overlapping (if overlap > 0) subdomains
- is_local - array of index sets defining non-overlapping subdomains

  Level: advanced

  Note:
  Presently `PCASMCreateSubdomains2D()` is valid only for sequential
  preconditioners. More general related routines are
  `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
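  Example Usage:
  A rough sketch, illustrative only; it assumes `pc` is already a `PCASM` and decomposes a 12 x 12 grid
  with one degree of freedom per node into a 2 x 2 array of subdomains with one mesh line of overlap:
.vb
  PetscInt Nsub;
  IS      *is, *is_local;

  PetscCall(PCASMCreateSubdomains2D(12, 12, 2, 2, 1, 1, &Nsub, &is, &is_local));
  PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
  /* ... set up and solve ... */
  PetscCall(PCASMDestroySubdomains(Nsub, &is, &is_local));
.ve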
.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMSetOverlap()`
@*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS *is[], IS *is_local[])
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0;
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
  only) for the additive Schwarz preconditioner, `PCASM`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n        - if requested, the number of subdomains for this processor (default value = 1)
. is       - if requested, the index sets that define the subdomains for this processor
- is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)

  Level: advanced

  Note:
  The `IS` numbering is in the parallel, global numbering of the vector.
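  Example Usage:
  A small sketch, illustrative only, of inspecting the subdomains after setup; it assumes `pc` is a `PCASM`
  and that `KSPSetUp()` (or `PCSetUp()`) has already been called:
.vb
  PetscInt nsub, bsz, b;
  IS      *is;

  PetscCall(PCASMGetLocalSubdomains(pc, &nsub, &is, NULL));
  for (b = 0; b < nsub; b++) {
    PetscCall(ISGetLocalSize(is[b], &bsz));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "block %" PetscInt_FMT " has %" PetscInt_FMT " indices\n", b, bsz));
  }
.ve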
.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
@*/
PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
{
  PC_ASM   *osm = (PC_ASM *)pc->data;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  if (is) PetscAssertPointer(is, 3);
  if (is_local) PetscAssertPointer(is_local, 4);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
  if (n) *n = osm->n_local_true;
  if (is) *is = osm->is;
  if (is_local) *is_local = osm->is_local;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
  only) for the additive Schwarz preconditioner, `PCASM`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n   - if requested, the number of matrices for this processor (default value = 1)
- mat - if requested, the matrices

  Level: advanced

  Notes:
  Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`.

  Usually one would use `PCSetModifySubMatrices()` to change the submatrices when building the preconditioner.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
@*/
PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
{
  PC_ASM   *osm;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscAssertPointer(n, 2);
  if (mat) PetscAssertPointer(mat, 3);
  PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  if (!match) {
    if (n) *n = 0;
    if (mat) *mat = NULL;
  } else {
    osm = (PC_ASM *)pc->data;
    if (n) *n = osm->n_local_true;
    if (mat) *mat = osm->pmat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner
- flg - boolean indicating whether to use subdomains defined by the `DM`

  Options Database Key:
. -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`

  Level: intermediate

  Note:
  `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
  so setting either of the first two effectively turns the latter off.
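  Example Usage:
  A brief sketch, illustrative only; it assumes `da` is an existing `DMDA` and `ksp` an existing `KSP`,
  and must run before the preconditioner is set up:
.vb
  PC pc;

  PetscCall(KSPSetDM(ksp, da)); /* call KSPSetDMActive(ksp, PETSC_FALSE) if the DM only defines the decomposition, not the operators */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCASM));
  PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE));
.ve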
  Developer Note:
  This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key

.seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
@*/
PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
{
  PC_ASM   *osm = (PC_ASM *)pc->data;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  if (match) osm->dm_subdomains = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMGetDMSubdomains - Returns a flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

  Not Collective

  Input Parameter:
. pc - the preconditioner

  Output Parameter:
. flg - boolean indicating whether to use subdomains defined by the `DM`

  Level: intermediate

  Developer Note:
  This should be `PCASMGetUseDMSubdomains()`

.seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
@*/
PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
{
  PC_ASM   *osm = (PC_ASM *)pc->data;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  if (match) *flg = osm->dm_subdomains;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.

  Not Collective

  Input Parameter:
. pc - the `PC`

  Output Parameter:
. sub_mat_type - name of matrix type

  Level: advanced

.seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
@*/
PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMSetSubMatType - Sets the type of matrix used for `PCASM` subsolves

  Collective

  Input Parameters:
+ pc           - the `PC` object
- sub_mat_type - the `MatType`

  Options Database Key:
. -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
  If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
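  Example Usage:
  A one-line sketch, illustrative only; `pc` is assumed to already be a `PCASM`. `MATSEQDENSE` is used here only
  because it is always available; in practice this routine is most useful with GPU types such as `MATSEQAIJVIENNACL`
  or `MATSEQAIJCUSPARSE` when PETSc is built with the corresponding package:
.vb
  PetscCall(PCASMSetSubMatType(pc, MATSEQDENSE));
.ve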
  Note:
  See `MatType` for available types

  Level: advanced

.seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
@*/
PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}