/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true
*/

#include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
#include <petsc/private/matimpl.h>

static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
{
  PC_ASM           *osm = (PC_ASM *)pc->data;
  PetscMPIInt       rank;
  PetscInt          i, bsz;
  PetscBool         iascii, isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char       *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";

    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, " %s, %s\n", blocks, overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer, " restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, " Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer, " Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer, " Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(sviewer));
          PetscCall(KSPView(osm->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(sviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      }
    } else {
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, " Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i], &bsz));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
        PetscCall(KSPView(osm->ksp[i], sviewer));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm = (PC_ASM *)pc->data;
  const char     *prefix;
  char            fname[PETSC_MAX_PATH_LEN + 1];
  PetscViewer     viewer, sviewer;
  char           *s;
  PetscInt        i, j, nidx;
  const PetscInt *idx;
  PetscMPIInt     rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
  PetscCall(PCGetOptionsPrefix(pc, &prefix));
  PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
  if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
  PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
  for (i = 0; i < osm->n_local; i++) {
    if (i < osm->n_local_true) {
      PetscCall(ISGetLocalSize(osm->is[i], &nidx));
      PetscCall(ISGetIndices(osm->is[i], &idx));
      /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
#define len 16 * (nidx + 1) + 512
      PetscCall(PetscMalloc1(len, &s));
      PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
#undef len
      PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
      for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
      PetscCall(ISRestoreIndices(osm->is[i], &idx));
      PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
      PetscCall(PetscViewerDestroy(&sviewer));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      PetscCall(PetscFree(s));
      if (osm->is_local) {
        /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
#define len 16 * (nidx + 1) + 512
        PetscCall(PetscMalloc1(len, &s));
        PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
#undef len
        PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
        PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
        PetscCall(ISGetIndices(osm->is_local[i], &idx));
        for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
        PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
        PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
        PetscCall(PetscViewerDestroy(&sviewer));
        PetscCall(PetscViewerASCIIPushSynchronized(viewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
        PetscCall(PetscViewerFlush(viewer));
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
        PetscCall(PetscFree(s));
      }
    } else {
      /* Participate in collective viewer calls. */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      /* Assume either all ranks have is_local or none do. */
      if (osm->is_local) {
        PetscCall(PetscViewerASCIIPushSynchronized(viewer));
        PetscCall(PetscViewerFlush(viewer));
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      }
    }
  }
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM       *osm = (PC_ASM *)pc->data;
  PetscBool     flg;
  PetscInt      i, m, m_local;
  MatReuse      scall = MAT_REUSE_MATRIX;
  IS            isl;
  KSP           ksp;
  PC            subpc;
  const char   *prefix, *pprefix;
  Vec           vec;
  DM           *domain_dm = NULL;
  MatNullSpace *nullsp = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m;

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt num_domains, d;
        char   **domain_names;
        IS      *inner_domain_is, *outer_domain_is;

        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        for (d = 0; d < num_domains; ++d) {
          if (domain_names) PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {
        PetscInt max, sum;
      } inwork, outwork;
      PetscMPIInt size;

      inwork.max = osm->n_local_true;
      inwork.sum = osm->n_local_true;
      PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
      for (i = 0; i < osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
        } else {
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
    }
    if (osm->sort_indices) {
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
      if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
        PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
        PetscCall(KSPSetType(ksp, KSPPREONLY));
        PetscCall(KSPGetPC(ksp, &subpc));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(KSPSetOptionsPrefix(ksp, prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
        if (domain_dm) {
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) PetscCall(PetscFree(domain_dm));
    }

    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
      PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
    PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
    for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
    if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
  }

  /* Convert the types of the submatrices (if need be) */
  if (osm->sub_mat_type) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));

    PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
    if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));

    PetscCall(ISGetLocalSize(osm->lis, &m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
    PetscCall(MatGetVecType(osm->pmat[0], &vtype));
    PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
    PetscCall(VecSetSizes(osm->lx, m, m));
    PetscCall(VecSetType(osm->lx, vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i = 0; i < osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt        *idx_is;
      PetscInt              *idx_lis, nout;

      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
      PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m, &idx_lis));
      PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
      PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
      PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll, isll_local;
        const PetscInt        *idx_local;
        PetscInt              *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
        PetscCall(PetscMalloc1(m_local, &idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));

        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
        PetscCall(PetscMalloc1(m_local, &idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
  for (i = 0; i < osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
    if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
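
/*
  Illustrative sketch (editor's addition, not part of the PETSc source): the setup above is
  normally driven from a user's outer KSP rather than called directly. The object names A, b,
  and x below are hypothetical; everything else uses the standard KSP/PC API.

    KSP ksp;
    PC  pc;
    PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
    PetscCall(KSPSetOperators(ksp, A, A));   // A assembled elsewhere
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCASM));
    PetscCall(PCASMSetOverlap(pc, 2));       // PCSetUp_ASM() then grows each subdomain by 2 layers
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSetUp(ksp));                // triggers PCSetUp_ASM(): subdomains, submatrices, one KSP per block
    PetscCall(KSPSolve(ksp, b, x));
*/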

static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM            *osm = (PC_ASM *)pc->data;
  PetscInt           i;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i = 0; i < osm->n_local_true; i++) {
    PetscCall(KSPSetUp(osm->ksp[i]));
    PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
    if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
        /* update the overlapping (i+1)-block RHS using the current local solution */
        PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
        PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
      }
    }
  }
  /* add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
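
/*
  Editor's sketch (hedged, not part of the original source): in operator form the loop above
  applies roughly

      y = sum_i R~_i^T A_i^{-1} R_i x

  where R_i restricts onto the overlapping subdomain is[i], A_i is the extracted submatrix
  solved by osm->ksp[i], and R~_i is either the same overlapping restriction (PC_ASM_BASIC) or
  the restriction onto the non-overlapping is_local[i] (the default PC_ASM_RESTRICT, i.e.
  restricted additive Schwarz, taken through osm->lprolongation). With
  PC_COMPOSITE_MULTIPLICATIVE the (i+1)-block right-hand side is additionally updated with the
  current local solution via the MatMult()/VecAXPBY() calls above.
*/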

static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  Mat         Z, W;
  Vec         x;
  PetscInt    i, m, N;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(MatZeroEntries(Y));
  PetscCall(VecSet(osm->ly, 0.0));

  for (i = 0; i < N; ++i) {
    PetscCall(MatDenseGetColumnVecRead(X, i, &x));
    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
  }
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
  /* solve the overlapping 0-block */
  PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
  PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
  PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
  PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
  PetscCall(MatDestroy(&Z));

  for (i = 0; i < N; ++i) {
    PetscCall(VecSet(osm->ly, 0.0));
    PetscCall(MatDenseGetColumnVecRead(W, i, &x));
    if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping 0-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
    }
    PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
  }
  PetscCall(MatDestroy(&W));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
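
/*
  Editor's note (hedged sketch, not part of the original source): the role swap described in the
  comment above follows from transposing the additive Schwarz operator,

      (sum_i R~_i^T A_i^{-1} R_i)^T = sum_i R_i^T A_i^{-T} R~_i,

  so the scatter that acted as interpolation in PCApply_ASM() acts as restriction here (and vice
  versa), and each block is solved with KSPSolveTranspose() instead of KSPSolve().
*/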

static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
  }
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i = 0; i < osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));
  }
  PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCall(PCReset_ASM(pc));
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
    PetscCall(PetscFree(osm->ksp));
  }
  PetscCall(PetscFree(pc->data));

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_ASM         *osm = (PC_ASM *)pc->data;
  PetscInt        blocks, ovl;
  PetscBool       flg;
  PCASMType       asmtype;
  PCCompositeType loctype;
  char            sub_mat_type[256];

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
  PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
  PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
  if (flg) {
    PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
    osm->dm_subdomains = PETSC_FALSE;
  }
  PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
  if (flg) {
    PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
    osm->dm_subdomains = PETSC_FALSE;
  }
  PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
  if (flg) {
    PetscCall(PCASMSetOverlap(pc, ovl));
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
  if (flg) PetscCall(PCASMSetType(pc, asmtype));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
  if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
  PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
  if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    if (osm->ksp && osm->n_local_true != n) {
      for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
      PetscCall(PetscFree(osm->ksp));
    }

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscMPIInt rank, size;
  PetscInt    n;

  PetscFunctionBegin;
  PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
  PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets, they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  n = N / size + ((N % size) > rank);
  PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
  PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
  PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  if (ksp) *ksp = osm->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(sub_mat_type, 2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscFree(osm->sub_mat_type));
  PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.

  Collective

  Input Parameters:
+ pc       - the preconditioner context
. n        - the number of subdomains for this processor (default value = 1)
. is       - the index sets that define the subdomains for this processor (or `NULL` for PETSc to determine subdomains)
- is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
             (or `NULL` to not provide these)

  Options Database Key:
. -pc_asm_local_blocks <blks> - Sets number of local blocks

  Level: advanced

  Notes:
  The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`

  By default the `PCASM` preconditioner uses 1 block per processor.

  Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.

  If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
  back to form the global solution (this is the standard restricted additive Schwarz method, RASM)

  If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
  no code to handle that case.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
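
/*
  Illustrative sketch (editor's addition, not part of the PETSc source): one way a caller might
  supply its own subdomains on each rank. The index sets ovl[] and loc[] and the block count used
  here are hypothetical and only meant to show the calling sequence documented above.

    IS ovl[2], loc[2];
    // fill ovl[] with the (overlapping) global indices of each subdomain and loc[] with the
    // corresponding non-overlapping indices, e.g. via ISCreateGeneral() on PETSC_COMM_SELF
    PetscCall(PCSetType(pc, PCASM));
    PetscCall(PCASMSetLocalSubdomains(pc, 2, ovl, loc));   // 2 blocks on this rank
*/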

/*@C
  PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
  additive Schwarz preconditioner, `PCASM`.

  Collective, all MPI ranks must pass in the same array of `IS`

  Input Parameters:
+ pc       - the preconditioner context
. N        - the number of subdomains for all processors
. is       - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains)
- is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information)

  Options Database Key:
. -pc_asm_blocks <blks> - Sets total blocks

  Level: advanced

  Notes:
  Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`.

  By default the `PCASM` preconditioner uses 1 block per processor.

  These index sets cannot be destroyed until after completion of the
  linear solves for which the `PCASM` preconditioner is being used.

  Use `PCASMSetLocalSubdomains()` to set local subdomains.

  The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local

.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCGASM`
@*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
  additive Schwarz preconditioner, `PCASM`.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

  Options Database Key:
. -pc_asm_overlap <ovl> - Sets overlap

  Level: intermediate

  Notes:
  By default the `PCASM` preconditioner uses 1 block per processor. To use
  multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
  `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).

  The overlap defaults to 1, so if one desires that no additional
  overlap be computed beyond what may have been set with a call to
  `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
  must be set to be 0. In particular, if one does not explicitly set
  the subdomains in an application code, then all overlap would be computed
  internally by PETSc, and using an overlap of 0 would result in a `PCASM`
  variant that is equivalent to the block Jacobi preconditioner.

  The default algorithm used by PETSc to increase overlap is fast, but not scalable;
  use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.

  One can define initial index sets with any overlap via
  `PCASMSetLocalSubdomains()`; the routine
  `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
  if desired.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
@*/
PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, ovl, 2);
  PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMSetType - Sets the type of restriction and interpolation used
  for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - variant of `PCASM`, one of
.vb
  PC_ASM_BASIC       - full interpolation and restriction
  PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
  PC_ASM_INTERPOLATE - full interpolation, local processor restriction
  PC_ASM_NONE        - local processor restriction and interpolation
.ve

  Options Database Key:
. -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`

  Level: intermediate

  Note:
  If the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
  to limit the local processor interpolation

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetType(PC pc, PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMGetType - Gets the type of restriction and interpolation used
  for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - variant of `PCASM`, one of
.vb
  PC_ASM_BASIC       - full interpolation and restriction
  PC_ASM_RESTRICT    - full restriction, local processor interpolation
  PC_ASM_INTERPOLATE - full interpolation, local processor restriction
  PC_ASM_NONE        - local processor restriction and interpolation
.ve

  Options Database Key:
. -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type

  Level: intermediate

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
@*/
PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCASMSetSortIndices - Determines whether subdomain indices are sorted.

  Logically Collective

  Input Parameters:
+ pc     - the preconditioner context
- doSort - sort the subdomain indices

  Level: intermediate

.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, doSort, 2);
  PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
  this processor.

  Collective iff first_local is requested

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n_local     - the number of blocks on this processor or `NULL`
. first_local - the global number of the first block on this processor or `NULL`, all processors must request or all must pass `NULL`
- ksp         - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.

  You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.

  Fortran Notes:
  The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`,
@*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`

   Options Database Keys:
+  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
.  -pc_asm_overlap <ovl>                          - Sets overlap
.  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
.  -pc_asm_dm_subdomains <bool>                   - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
-  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`

   Level: beginner

   Notes:
   If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
   will get a different convergence rate due to the default option of `-pc_asm_type restrict`. Use
   `-pc_asm_type basic` to get the same convergence behavior

   Each processor can have one or more blocks, but a block cannot be shared by more
   than one processor. Use `PCGASM` for subdomains shared by multiple processes.

   To set options on the solvers for each block append `-sub_` to all the `KSP`, and `PC`
   options database keys.
   For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`

   To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
   and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)

   If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
          `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
          `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscCall(PetscNew(&osm));

  osm->n             = PETSC_DECIDE;
  osm->n_local       = 0;
  osm->n_local_true  = PETSC_DECIDE;
  osm->overlap       = 1;
  osm->ksp           = NULL;
  osm->restriction   = NULL;
  osm->lprolongation = NULL;
  osm->lrestriction  = NULL;
  osm->x             = NULL;
  osm->y             = NULL;
  osm->is            = NULL;
  osm->is_local      = NULL;
  osm->mat           = NULL;
  osm->pmat          = NULL;
  osm->type          = PC_ASM_RESTRICT;
  osm->loctype       = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices  = PETSC_TRUE;
  osm->dm_subdomains = PETSC_FALSE;
  osm->sub_mat_type  = NULL;

  pc->data                 = (void *)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
  PetscFunctionReturn(PETSC_SUCCESS);
}
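
/*
  Illustrative command line (editor's addition, not part of the PETSc source): the executable
  name and process count are hypothetical; the -ksp_*, -pc_asm_*, and -sub_* options are the ones
  documented in the PCASM manual page and registered in PCSetFromOptions_ASM() above.

    mpiexec -n 4 ./myapp -ksp_type gmres -pc_type asm \
        -pc_asm_overlap 2 -pc_asm_type restrict \
        -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1

  Each block solver picks up the `sub_` prefixed options because PCSetUp_ASM() appends the
  "sub_" prefix to every inner KSP before calling KSPSetFromOptions() on it.
*/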
outis - the array of index sets defining the subdomains 1324 1325 Level: advanced 1326 1327 Note: 1328 This generates nonoverlapping subdomains; the `PCASM` will generate the overlap 1329 from these if you use `PCASMSetLocalSubdomains()` 1330 1331 Fortran Notes: 1332 You must provide the array outis[] already allocated of length n. 1333 1334 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()` 1335 @*/ 1336 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[]) 1337 { 1338 MatPartitioning mpart; 1339 const char *prefix; 1340 PetscInt i, j, rstart, rend, bs; 1341 PetscBool hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE; 1342 Mat Ad = NULL, adj; 1343 IS ispart, isnumb, *is; 1344 1345 PetscFunctionBegin; 1346 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 1347 PetscAssertPointer(outis, 3); 1348 PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n); 1349 1350 /* Get prefix, row distribution, and block size */ 1351 PetscCall(MatGetOptionsPrefix(A, &prefix)); 1352 PetscCall(MatGetOwnershipRange(A, &rstart, &rend)); 1353 PetscCall(MatGetBlockSize(A, &bs)); 1354 PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs); 1355 1356 /* Get diagonal block from matrix if possible */ 1357 PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop)); 1358 if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad)); 1359 if (Ad) { 1360 PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij)); 1361 if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij)); 1362 } 1363 if (Ad && n > 1) { 1364 PetscBool match, done; 1365 /* Try to setup a good matrix partitioning if available */ 1366 PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart)); 1367 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix)); 1368 PetscCall(MatPartitioningSetFromOptions(mpart)); 1369 PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match)); 1370 if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match)); 1371 if (!match) { /* assume a "good" partitioner is available */ 1372 PetscInt na; 1373 const PetscInt *ia, *ja; 1374 PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done)); 1375 if (done) { 1376 /* Build adjacency matrix by hand. 
Unfortunately a call to 1377 MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will 1378 remove the block-aij structure and we cannot expect 1379 MatPartitioning to split vertices as we need */ 1380 PetscInt i, j, len, nnz, cnt, *iia = NULL, *jja = NULL; 1381 const PetscInt *row; 1382 nnz = 0; 1383 for (i = 0; i < na; i++) { /* count number of nonzeros */ 1384 len = ia[i + 1] - ia[i]; 1385 row = ja + ia[i]; 1386 for (j = 0; j < len; j++) { 1387 if (row[j] == i) { /* don't count diagonal */ 1388 len--; 1389 break; 1390 } 1391 } 1392 nnz += len; 1393 } 1394 PetscCall(PetscMalloc1(na + 1, &iia)); 1395 PetscCall(PetscMalloc1(nnz, &jja)); 1396 nnz = 0; 1397 iia[0] = 0; 1398 for (i = 0; i < na; i++) { /* fill adjacency */ 1399 cnt = 0; 1400 len = ia[i + 1] - ia[i]; 1401 row = ja + ia[i]; 1402 for (j = 0; j < len; j++) { 1403 if (row[j] != i) { /* if not diagonal */ 1404 jja[nnz + cnt++] = row[j]; 1405 } 1406 } 1407 nnz += cnt; 1408 iia[i + 1] = nnz; 1409 } 1410 /* Partitioning of the adjacency matrix */ 1411 PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj)); 1412 PetscCall(MatPartitioningSetAdjacency(mpart, adj)); 1413 PetscCall(MatPartitioningSetNParts(mpart, n)); 1414 PetscCall(MatPartitioningApply(mpart, &ispart)); 1415 PetscCall(ISPartitioningToNumbering(ispart, &isnumb)); 1416 PetscCall(MatDestroy(&adj)); 1417 foundpart = PETSC_TRUE; 1418 } 1419 PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done)); 1420 } 1421 PetscCall(MatPartitioningDestroy(&mpart)); 1422 } 1423 1424 PetscCall(PetscMalloc1(n, &is)); 1425 *outis = is; 1426 1427 if (!foundpart) { 1428 /* Partitioning by contiguous chunks of rows */ 1429 1430 PetscInt mbs = (rend - rstart) / bs; 1431 PetscInt start = rstart; 1432 for (i = 0; i < n; i++) { 1433 PetscInt count = (mbs / n + ((mbs % n) > i)) * bs; 1434 PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i])); 1435 start += count; 1436 } 1437 1438 } else { 1439 /* Partitioning by adjacency of diagonal block */ 1440 1441 const PetscInt *numbering; 1442 PetscInt *count, nidx, *indices, *newidx, start = 0; 1443 /* Get node count in each partition */ 1444 PetscCall(PetscMalloc1(n, &count)); 1445 PetscCall(ISPartitioningCount(ispart, n, count)); 1446 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1447 for (i = 0; i < n; i++) count[i] *= bs; 1448 } 1449 /* Build indices from node numbering */ 1450 PetscCall(ISGetLocalSize(isnumb, &nidx)); 1451 PetscCall(PetscMalloc1(nidx, &indices)); 1452 for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */ 1453 PetscCall(ISGetIndices(isnumb, &numbering)); 1454 PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices)); 1455 PetscCall(ISRestoreIndices(isnumb, &numbering)); 1456 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1457 PetscCall(PetscMalloc1(nidx * bs, &newidx)); 1458 for (i = 0; i < nidx; i++) { 1459 for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j; 1460 } 1461 PetscCall(PetscFree(indices)); 1462 nidx *= bs; 1463 indices = newidx; 1464 } 1465 /* Shift to get global indices */ 1466 for (i = 0; i < nidx; i++) indices[i] += rstart; 1467 1468 /* Build the index sets for each block */ 1469 for (i = 0; i < n; i++) { 1470 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i])); 1471 PetscCall(ISSort(is[i])); 1472 start += count[i]; 1473 } 1474 1475 PetscCall(PetscFree(count)); 1476 PetscCall(PetscFree(indices)); 1477 PetscCall(ISDestroy(&isnumb)); 1478 PetscCall(ISDestroy(&ispart)); 
1479 }
1480 PetscFunctionReturn(PETSC_SUCCESS);
1481 }
1482
1483 /*@C
1484 PCASMDestroySubdomains - Destroys the index sets created with
1485 `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1486
1487 Collective
1488
1489 Input Parameters:
1490 + n - the number of index sets
1491 . is - the array of index sets
1492 - is_local - the array of local index sets, can be `NULL`
1493
1494 Level: advanced
1495
1496 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1497 @*/
1498 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1499 {
1500 PetscInt i;
1501
1502 PetscFunctionBegin;
1503 if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1504 if (is) {
1505 PetscAssertPointer(is, 2);
1506 for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1507 PetscCall(PetscFree(is));
1508 }
1509 if (is_local) {
1510 PetscAssertPointer(is_local, 3);
1511 for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1512 PetscCall(PetscFree(is_local));
1513 }
1514 PetscFunctionReturn(PETSC_SUCCESS);
1515 }
1516
1517 /*@C
1518 PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1519 preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1520
1521 Not Collective
1522
1523 Input Parameters:
1524 + m - the number of mesh points in the x direction
1525 . n - the number of mesh points in the y direction
1526 . M - the number of subdomains in the x direction
1527 . N - the number of subdomains in the y direction
1528 . dof - degrees of freedom per node
1529 - overlap - overlap in mesh lines
1530
1531 Output Parameters:
1532 + Nsub - the number of subdomains created
1533 . is - array of index sets defining overlapping (if overlap > 0) subdomains
1534 - is_local - array of index sets defining non-overlapping subdomains
1535
1536 Level: advanced
1537
1538 Note:
1539 Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1540 preconditioners. More general related routines are
1541 `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
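
   Example Usage:
   A minimal sketch; the mesh sizes, the 2 x 2 subdomain layout, the overlap of 1, and the `PC` named pc are illustrative assumptions:
.vb
   PetscInt m = 64, n = 64, Nsub;
   IS      *is, *is_local;

   PetscCall(PCASMCreateSubdomains2D(m, n, 2, 2, 1, 1, &Nsub, &is, &is_local));
   PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
   PetscCall(PCASMDestroySubdomains(Nsub, is, is_local));
.ve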
1542 1543 Fortran Notes: 1544 The `IS` must be declared as an array of length long enough to hold `Nsub` entries 1545 1546 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`, 1547 `PCASMSetOverlap()` 1548 @*/ 1549 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local) 1550 { 1551 PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer; 1552 PetscInt nidx, *idx, loc, ii, jj, count; 1553 1554 PetscFunctionBegin; 1555 PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1"); 1556 1557 *Nsub = N * M; 1558 PetscCall(PetscMalloc1(*Nsub, is)); 1559 PetscCall(PetscMalloc1(*Nsub, is_local)); 1560 ystart = 0; 1561 loc_outer = 0; 1562 for (i = 0; i < N; i++) { 1563 height = n / N + ((n % N) > i); /* height of subdomain */ 1564 PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n"); 1565 yleft = ystart - overlap; 1566 if (yleft < 0) yleft = 0; 1567 yright = ystart + height + overlap; 1568 if (yright > n) yright = n; 1569 xstart = 0; 1570 for (j = 0; j < M; j++) { 1571 width = m / M + ((m % M) > j); /* width of subdomain */ 1572 PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m"); 1573 xleft = xstart - overlap; 1574 if (xleft < 0) xleft = 0; 1575 xright = xstart + width + overlap; 1576 if (xright > m) xright = m; 1577 nidx = (xright - xleft) * (yright - yleft); 1578 PetscCall(PetscMalloc1(nidx, &idx)); 1579 loc = 0; 1580 for (ii = yleft; ii < yright; ii++) { 1581 count = m * ii + xleft; 1582 for (jj = xleft; jj < xright; jj++) idx[loc++] = count++; 1583 } 1584 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer)); 1585 if (overlap == 0) { 1586 PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer])); 1587 1588 (*is_local)[loc_outer] = (*is)[loc_outer]; 1589 } else { 1590 for (loc = 0, ii = ystart; ii < ystart + height; ii++) { 1591 for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj; 1592 } 1593 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer)); 1594 } 1595 PetscCall(PetscFree(idx)); 1596 xstart += width; 1597 loc_outer++; 1598 } 1599 ystart += height; 1600 } 1601 for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i])); 1602 PetscFunctionReturn(PETSC_SUCCESS); 1603 } 1604 1605 /*@C 1606 PCASMGetLocalSubdomains - Gets the local subdomains (for this processor 1607 only) for the additive Schwarz preconditioner, `PCASM`. 1608 1609 Not Collective 1610 1611 Input Parameter: 1612 . pc - the preconditioner context 1613 1614 Output Parameters: 1615 + n - if requested, the number of subdomains for this processor (default value = 1) 1616 . is - if requested, the index sets that define the subdomains for this processor 1617 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`) 1618 1619 Level: advanced 1620 1621 Note: 1622 The `IS` numbering is in the parallel, global numbering of the vector. 
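
   Example Usage:
   A minimal sketch, assuming pc is a `PCASM` preconditioner that has already been set up (for example by `KSPSetUp()`):
.vb
   PetscInt nsub, i;
   IS      *is, *is_local;

   PetscCall(PCASMGetLocalSubdomains(pc, &nsub, &is, &is_local));
   for (i = 0; i < nsub; i++) PetscCall(ISView(is[i], PETSC_VIEWER_STDOUT_SELF));
.ve
   The returned arrays are the ones stored inside the `PC`; they must not be freed or modified by the caller.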
1623
1624 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1625 `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1626 @*/
1627 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1628 {
1629 PC_ASM *osm = (PC_ASM *)pc->data;
1630 PetscBool match;
1631
1632 PetscFunctionBegin;
1633 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1634 if (n) PetscAssertPointer(n, 2);
1635 if (is) PetscAssertPointer(is, 3);
1636 if (is_local) PetscAssertPointer(is_local, 4);
1637 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1638 PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1639 if (n) *n = osm->n_local_true;
1640 if (is) *is = osm->is;
1641 if (is_local) *is_local = osm->is_local;
1642 PetscFunctionReturn(PETSC_SUCCESS);
1643 }
1644
1645 /*@C
1646 PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1647 only) for the additive Schwarz preconditioner, `PCASM`.
1648
1649 Not Collective
1650
1651 Input Parameter:
1652 . pc - the preconditioner context
1653
1654 Output Parameters:
1655 + n - if requested, the number of matrices for this processor (default value = 1)
1656 - mat - if requested, the matrices
1657
1658 Level: advanced
1659
1660 Notes:
1661 Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`.
1662
1663 Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1664
1665 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1666 `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1667 @*/
1668 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1669 {
1670 PC_ASM *osm;
1671 PetscBool match;
1672
1673 PetscFunctionBegin;
1674 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1675 if (n) PetscAssertPointer(n, 2);
1676 if (mat) PetscAssertPointer(mat, 3);
1677 PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1678 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1679 if (!match) {
1680 if (n) *n = 0;
1681 if (mat) *mat = NULL;
1682 } else {
1683 osm = (PC_ASM *)pc->data;
1684 if (n) *n = osm->n_local_true;
1685 if (mat) *mat = osm->pmat;
1686 }
1687 PetscFunctionReturn(PETSC_SUCCESS);
1688 }
1689
1690 /*@
1691 PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1692
1693 Logically Collective
1694
1695 Input Parameters:
1696 + pc - the preconditioner
1697 - flg - boolean indicating whether to use subdomains defined by the `DM`
1698
1699 Options Database Key:
1700 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1701
1702 Level: intermediate
1703
1704 Note:
1705 `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1706 so setting either of the first two effectively turns the latter off.
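
   Example Usage:
   A minimal sketch, assuming ksp and dm were created and configured elsewhere and that the `DM` supports `DMCreateDomainDecomposition()`:
.vb
   PC pc;

   PetscCall(KSPSetDM(ksp, dm));
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCASM));
   PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE));
.ve
   As with the options database key, this must be called before the preconditioner is set up.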
1707
1708 Developer Note:
1709 This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key
1710
1711 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1712 `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1713 @*/
1714 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1715 {
1716 PC_ASM *osm = (PC_ASM *)pc->data;
1717 PetscBool match;
1718
1719 PetscFunctionBegin;
1720 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1721 PetscValidLogicalCollectiveBool(pc, flg, 2);
1722 PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1723 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1724 if (match) osm->dm_subdomains = flg;
1725 PetscFunctionReturn(PETSC_SUCCESS);
1726 }
1727
1728 /*@
1729 PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1730
1731 Not Collective
1732
1733 Input Parameter:
1734 . pc - the preconditioner
1735
1736 Output Parameter:
1737 . flg - boolean indicating whether to use subdomains defined by the `DM`
1738
1739 Level: intermediate
1740
1741 Developer Note:
1742 This should be `PCASMGetUseDMSubdomains()`
1743
1744 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1745 `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1746 @*/
1747 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1748 {
1749 PC_ASM *osm = (PC_ASM *)pc->data;
1750 PetscBool match;
1751
1752 PetscFunctionBegin;
1753 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1754 PetscAssertPointer(flg, 2);
1755 PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1756 if (match) *flg = osm->dm_subdomains;
1757 else *flg = PETSC_FALSE;
1758 PetscFunctionReturn(PETSC_SUCCESS);
1759 }
1760
1761 /*@C
1762 PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1763
1764 Not Collective
1765
1766 Input Parameter:
1767 . pc - the `PC`
1768
1769 Output Parameter:
1770 . sub_mat_type - name of matrix type
1771
1772 Level: advanced
1773
1774 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1775 @*/
1776 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1777 {
1778 PetscFunctionBegin;
1779 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1780 PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1781 PetscFunctionReturn(PETSC_SUCCESS);
1782 }
1783
1784 /*@C
1785 PCASMSetSubMatType - Sets the type of matrix used for `PCASM` subsolves
1786
1787 Collective
1788
1789 Input Parameters:
1790 + pc - the `PC` object
1791 - sub_mat_type - the `MatType`
1792
1793 Options Database Key:
1794 . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1795 If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
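
   Example Usage:
   A minimal sketch, assuming pc is a `PCASM` preconditioner and the PETSc build provides the requested matrix type at runtime (here ViennaCL):
.vb
   PetscCall(PCASMSetSubMatType(pc, MATSEQAIJVIENNACL));
.ve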
1796 1797 Note: 1798 See `MatType` for available types 1799 1800 Level: advanced 1801 1802 .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat` 1803 @*/ 1804 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type) 1805 { 1806 PetscFunctionBegin; 1807 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1808 PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type)); 1809 PetscFunctionReturn(PETSC_SUCCESS); 1810 } 1811
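
/*
  A minimal end-to-end usage sketch for PCASM collecting the calls documented above. It is
  illustrative only and not compiled as part of this file; the Mat A and the Vecs b and x are
  assumed to have been created and assembled by the caller.

    KSP      ksp;
    PC       pc, subpc;
    KSP     *subksp;
    PetscInt i, nlocal, first;

    PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCASM));
    PetscCall(PCASMSetOverlap(pc, 2));
    PetscCall(PCASMSetType(pc, PC_ASM_BASIC));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSetUp(ksp));                          // KSPSetUp() must precede PCASMGetSubKSP()
    PetscCall(PCASMGetSubKSP(pc, &nlocal, &first, &subksp));
    for (i = 0; i < nlocal; i++) {                     // configure the solver on each local block
      PetscCall(KSPGetPC(subksp[i], &subpc));
      PetscCall(PCSetType(subpc, PCILU));
    }
    PetscCall(KSPSolve(ksp, b, x));                    // subksp is owned by the PC; do not free it
    PetscCall(KSPDestroy(&ksp));
*/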