1 /* 2 This file defines an additive Schwarz preconditioner for any Mat implementation. 3 4 Note that each processor may have any number of subdomains. But in order to 5 deal easily with the VecScatter(), we treat each processor as if it has the 6 same number of subdomains. 7 8 n - total number of true subdomains on all processors 9 n_local_true - actual number of subdomains on this processor 10 n_local = maximum over all processors of n_local_true 11 */ 12 13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/ 14 #include "petsc/private/matimpl.h" 15 16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer) 17 { 18 PC_ASM *osm = (PC_ASM *)pc->data; 19 PetscMPIInt rank; 20 PetscInt i, bsz; 21 PetscBool iascii, isstring; 22 PetscViewer sviewer; 23 PetscViewerFormat format; 24 const char *prefix; 25 26 PetscFunctionBegin; 27 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 28 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring)); 29 if (iascii) { 30 char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set"; 31 if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap)); 32 if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n)); 33 PetscCall(PetscViewerASCIIPrintf(viewer, " %s, %s\n", blocks, overlaps)); 34 PetscCall(PetscViewerASCIIPrintf(viewer, " restriction/interpolation type - %s\n", PCASMTypes[osm->type])); 35 if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, " Additive Schwarz: using DM to define subdomains\n")); 36 if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype])); 37 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank)); 38 PetscCall(PetscViewerGetFormat(viewer, &format)); 39 if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) { 40 if (osm->ksp) { 41 PetscCall(PetscViewerASCIIPrintf(viewer, " Local solver information for first block is in the following KSP and PC objects on rank 0:\n")); 42 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 43 PetscCall(PetscViewerASCIIPrintf(viewer, " Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? 
prefix : "")); 44 PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 45 if (rank == 0) { 46 PetscCall(PetscViewerASCIIPushTab(viewer)); 47 PetscCall(KSPView(osm->ksp[0], sviewer)); 48 PetscCall(PetscViewerASCIIPopTab(viewer)); 49 } 50 PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 51 } 52 } else { 53 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 54 PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true)); 55 PetscCall(PetscViewerFlush(viewer)); 56 PetscCall(PetscViewerASCIIPrintf(viewer, " Local solver information for each block is in the following KSP and PC objects:\n")); 57 PetscCall(PetscViewerASCIIPushTab(viewer)); 58 PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n")); 59 PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 60 for (i = 0; i < osm->n_local_true; i++) { 61 PetscCall(ISGetLocalSize(osm->is[i], &bsz)); 62 PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz)); 63 PetscCall(KSPView(osm->ksp[i], sviewer)); 64 PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n")); 65 } 66 PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 67 PetscCall(PetscViewerASCIIPopTab(viewer)); 68 PetscCall(PetscViewerFlush(viewer)); 69 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 70 } 71 } else if (isstring) { 72 PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type])); 73 PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 74 if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer)); 75 PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer)); 76 } 77 PetscFunctionReturn(PETSC_SUCCESS); 78 } 79 80 static PetscErrorCode PCASMPrintSubdomains(PC pc) 81 { 82 PC_ASM *osm = (PC_ASM *)pc->data; 83 const char *prefix; 84 char fname[PETSC_MAX_PATH_LEN + 1]; 85 PetscViewer viewer, sviewer; 86 char *s; 87 PetscInt i, j, nidx; 88 const PetscInt *idx; 89 PetscMPIInt rank, size; 90 91 PetscFunctionBegin; 92 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 93 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank)); 94 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 95 PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL)); 96 if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname))); 97 PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer)); 98 for (i = 0; i < osm->n_local; i++) { 99 if (i < osm->n_local_true) { 100 PetscCall(ISGetLocalSize(osm->is[i], &nidx)); 101 PetscCall(ISGetIndices(osm->is[i], &idx)); 102 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 103 #define len 16 * (nidx + 1) + 512 104 PetscCall(PetscMalloc1(len, &s)); 105 PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer)); 106 #undef len 107 PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i)); 108 for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j])); 109 PetscCall(ISRestoreIndices(osm->is[i], &idx)); 110 PetscCall(PetscViewerStringSPrintf(sviewer, "\n")); 111 
PetscCall(PetscViewerDestroy(&sviewer)); 112 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 113 PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s)); 114 PetscCall(PetscViewerFlush(viewer)); 115 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 116 PetscCall(PetscFree(s)); 117 if (osm->is_local) { 118 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 119 #define len 16 * (nidx + 1) + 512 120 PetscCall(PetscMalloc1(len, &s)); 121 PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer)); 122 #undef len 123 PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i)); 124 PetscCall(ISGetLocalSize(osm->is_local[i], &nidx)); 125 PetscCall(ISGetIndices(osm->is_local[i], &idx)); 126 for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j])); 127 PetscCall(ISRestoreIndices(osm->is_local[i], &idx)); 128 PetscCall(PetscViewerStringSPrintf(sviewer, "\n")); 129 PetscCall(PetscViewerDestroy(&sviewer)); 130 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 131 PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s)); 132 PetscCall(PetscViewerFlush(viewer)); 133 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 134 PetscCall(PetscFree(s)); 135 } 136 } else { 137 /* Participate in collective viewer calls. */ 138 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 139 PetscCall(PetscViewerFlush(viewer)); 140 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 141 /* Assume either all ranks have is_local or none do. */ 142 if (osm->is_local) { 143 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 144 PetscCall(PetscViewerFlush(viewer)); 145 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 146 } 147 } 148 } 149 PetscCall(PetscViewerFlush(viewer)); 150 PetscCall(PetscViewerDestroy(&viewer)); 151 PetscFunctionReturn(PETSC_SUCCESS); 152 } 153 154 static PetscErrorCode PCSetUp_ASM(PC pc) 155 { 156 PC_ASM *osm = (PC_ASM *)pc->data; 157 PetscBool flg; 158 PetscInt i, m, m_local; 159 MatReuse scall = MAT_REUSE_MATRIX; 160 IS isl; 161 KSP ksp; 162 PC subpc; 163 const char *prefix, *pprefix; 164 Vec vec; 165 DM *domain_dm = NULL; 166 167 PetscFunctionBegin; 168 if (!pc->setupcalled) { 169 PetscInt m; 170 171 /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */ 172 if (osm->n_local_true == PETSC_DECIDE) { 173 /* no subdomains given */ 174 /* try pc->dm first, if allowed */ 175 if (osm->dm_subdomains && pc->dm) { 176 PetscInt num_domains, d; 177 char **domain_names; 178 IS *inner_domain_is, *outer_domain_is; 179 PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm)); 180 osm->overlap = -1; /* We do not want to increase the overlap of the IS. 
181 A future improvement of this code might allow one to use 182 DM-defined subdomains and also increase the overlap, 183 but that is not currently supported */ 184 if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is)); 185 for (d = 0; d < num_domains; ++d) { 186 if (domain_names) PetscCall(PetscFree(domain_names[d])); 187 if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d])); 188 if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d])); 189 } 190 PetscCall(PetscFree(domain_names)); 191 PetscCall(PetscFree(inner_domain_is)); 192 PetscCall(PetscFree(outer_domain_is)); 193 } 194 if (osm->n_local_true == PETSC_DECIDE) { 195 /* still no subdomains; use one subdomain per processor */ 196 osm->n_local_true = 1; 197 } 198 } 199 { /* determine the global and max number of subdomains */ 200 struct { 201 PetscInt max, sum; 202 } inwork, outwork; 203 PetscMPIInt size; 204 205 inwork.max = osm->n_local_true; 206 inwork.sum = osm->n_local_true; 207 PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc))); 208 osm->n_local = outwork.max; 209 osm->n = outwork.sum; 210 211 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 212 if (outwork.max == 1 && outwork.sum == size) { 213 /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */ 214 PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE)); 215 } 216 } 217 if (!osm->is) { /* create the index sets */ 218 PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is)); 219 } 220 if (osm->n_local_true > 1 && !osm->is_local) { 221 PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local)); 222 for (i = 0; i < osm->n_local_true; i++) { 223 if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */ 224 PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i])); 225 PetscCall(ISCopy(osm->is[i], osm->is_local[i])); 226 } else { 227 PetscCall(PetscObjectReference((PetscObject)osm->is[i])); 228 osm->is_local[i] = osm->is[i]; 229 } 230 } 231 } 232 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 233 if (osm->overlap > 0) { 234 /* Extend the "overlapping" regions by a number of steps */ 235 PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap)); 236 } 237 if (osm->sort_indices) { 238 for (i = 0; i < osm->n_local_true; i++) { 239 PetscCall(ISSort(osm->is[i])); 240 if (osm->is_local) PetscCall(ISSort(osm->is_local[i])); 241 } 242 } 243 flg = PETSC_FALSE; 244 PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg)); 245 if (flg) PetscCall(PCASMPrintSubdomains(pc)); 246 if (!osm->ksp) { 247 /* Create the local solvers */ 248 PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp)); 249 if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n")); 250 for (i = 0; i < osm->n_local_true; i++) { 251 PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp)); 252 PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure)); 253 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1)); 254 PetscCall(KSPSetType(ksp, KSPPREONLY)); 255 PetscCall(KSPGetPC(ksp, &subpc)); 256 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 257 PetscCall(KSPSetOptionsPrefix(ksp, prefix)); 258 PetscCall(KSPAppendOptionsPrefix(ksp, "sub_")); 259 if (domain_dm) { 260 PetscCall(KSPSetDM(ksp, domain_dm[i])); 261 PetscCall(KSPSetDMActive(ksp, PETSC_FALSE)); 262 
PetscCall(DMDestroy(&domain_dm[i])); 263 } 264 osm->ksp[i] = ksp; 265 } 266 if (domain_dm) PetscCall(PetscFree(domain_dm)); 267 } 268 269 PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis)); 270 PetscCall(ISSortRemoveDups(osm->lis)); 271 PetscCall(ISGetLocalSize(osm->lis, &m)); 272 273 scall = MAT_INITIAL_MATRIX; 274 } else { 275 /* 276 Destroy the blocks from the previous iteration 277 */ 278 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 279 PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat)); 280 scall = MAT_INITIAL_MATRIX; 281 } 282 } 283 284 /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */ 285 if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) { 286 if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat)); 287 scall = MAT_INITIAL_MATRIX; 288 } 289 290 /* 291 Extract out the submatrices 292 */ 293 PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat)); 294 if (scall == MAT_INITIAL_MATRIX) { 295 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix)); 296 for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix)); 297 } 298 299 /* Convert the types of the submatrices (if needbe) */ 300 if (osm->sub_mat_type) { 301 for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i]))); 302 } 303 304 if (!pc->setupcalled) { 305 VecType vtype; 306 307 /* Create the local work vectors (from the local matrices) and scatter contexts */ 308 PetscCall(MatCreateVecs(pc->pmat, &vec, NULL)); 309 310 PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()"); 311 if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation)); 312 PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction)); 313 PetscCall(PetscMalloc1(osm->n_local_true, &osm->x)); 314 PetscCall(PetscMalloc1(osm->n_local_true, &osm->y)); 315 316 PetscCall(ISGetLocalSize(osm->lis, &m)); 317 PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl)); 318 PetscCall(MatGetVecType(osm->pmat[0], &vtype)); 319 PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx)); 320 PetscCall(VecSetSizes(osm->lx, m, m)); 321 PetscCall(VecSetType(osm->lx, vtype)); 322 PetscCall(VecDuplicate(osm->lx, &osm->ly)); 323 PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction)); 324 PetscCall(ISDestroy(&isl)); 325 326 for (i = 0; i < osm->n_local_true; ++i) { 327 ISLocalToGlobalMapping ltog; 328 IS isll; 329 const PetscInt *idx_is; 330 PetscInt *idx_lis, nout; 331 332 PetscCall(ISGetLocalSize(osm->is[i], &m)); 333 PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL)); 334 PetscCall(VecDuplicate(osm->x[i], &osm->y[i])); 335 336 /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */ 337 PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, <og)); 338 PetscCall(ISGetLocalSize(osm->is[i], &m)); 339 PetscCall(ISGetIndices(osm->is[i], &idx_is)); 340 PetscCall(PetscMalloc1(m, &idx_lis)); 341 PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis)); 342 PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset 
of lis"); 343 PetscCall(ISRestoreIndices(osm->is[i], &idx_is)); 344 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll)); 345 PetscCall(ISLocalToGlobalMappingDestroy(<og)); 346 PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl)); 347 PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i])); 348 PetscCall(ISDestroy(&isll)); 349 PetscCall(ISDestroy(&isl)); 350 if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */ 351 ISLocalToGlobalMapping ltog; 352 IS isll, isll_local; 353 const PetscInt *idx_local; 354 PetscInt *idx1, *idx2, nout; 355 356 PetscCall(ISGetLocalSize(osm->is_local[i], &m_local)); 357 PetscCall(ISGetIndices(osm->is_local[i], &idx_local)); 358 359 PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], <og)); 360 PetscCall(PetscMalloc1(m_local, &idx1)); 361 PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1)); 362 PetscCall(ISLocalToGlobalMappingDestroy(<og)); 363 PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is"); 364 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll)); 365 366 PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, <og)); 367 PetscCall(PetscMalloc1(m_local, &idx2)); 368 PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2)); 369 PetscCall(ISLocalToGlobalMappingDestroy(<og)); 370 PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis"); 371 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local)); 372 373 PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local)); 374 PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i])); 375 376 PetscCall(ISDestroy(&isll)); 377 PetscCall(ISDestroy(&isll_local)); 378 } 379 } 380 PetscCall(VecDestroy(&vec)); 381 } 382 383 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 384 IS *cis; 385 PetscInt c; 386 387 PetscCall(PetscMalloc1(osm->n_local_true, &cis)); 388 for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis; 389 PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats)); 390 PetscCall(PetscFree(cis)); 391 } 392 393 /* Return control to the user so that the submatrices can be modified (e.g., to apply 394 different boundary conditions for the submatrices than for the global problem) */ 395 PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP)); 396 397 /* 398 Loop over subdomains putting them into local ksp 399 */ 400 PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix)); 401 for (i = 0; i < osm->n_local_true; i++) { 402 PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i])); 403 PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix)); 404 if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i])); 405 } 406 PetscFunctionReturn(PETSC_SUCCESS); 407 } 408 409 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc) 410 { 411 PC_ASM *osm = (PC_ASM *)pc->data; 412 PetscInt i; 413 KSPConvergedReason reason; 414 415 PetscFunctionBegin; 416 for (i = 0; i < osm->n_local_true; i++) { 417 PetscCall(KSPSetUp(osm->ksp[i])); 418 PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason)); 419 if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 420 } 421 PetscFunctionReturn(PETSC_SUCCESS); 422 } 423 424 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec 
y) 425 { 426 PC_ASM *osm = (PC_ASM *)pc->data; 427 PetscInt i, n_local_true = osm->n_local_true; 428 ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE; 429 430 PetscFunctionBegin; 431 /* 432 support for limiting the restriction or interpolation to only local 433 subdomain values (leaving the other values 0). 434 */ 435 if (!(osm->type & PC_ASM_RESTRICT)) { 436 forward = SCATTER_FORWARD_LOCAL; 437 /* have to zero the work RHS since scatter may leave some slots empty */ 438 PetscCall(VecSet(osm->lx, 0.0)); 439 } 440 if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL; 441 442 PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]); 443 /* zero the global and the local solutions */ 444 PetscCall(VecSet(y, 0.0)); 445 PetscCall(VecSet(osm->ly, 0.0)); 446 447 /* copy the global RHS to local RHS including the ghost nodes */ 448 PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 449 PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 450 451 /* restrict local RHS to the overlapping 0-block RHS */ 452 PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 453 PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 454 455 /* do the local solves */ 456 for (i = 0; i < n_local_true; ++i) { 457 /* solve the overlapping i-block */ 458 PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 459 PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i])); 460 PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i])); 461 PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 462 463 if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */ 464 PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 465 PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 466 } else { /* interpolate the overlapping i-block solution to the local solution */ 467 PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 468 PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 469 } 470 471 if (i < n_local_true - 1) { 472 /* restrict local RHS to the overlapping (i+1)-block RHS */ 473 PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 474 PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 475 476 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 477 /* update the overlapping (i+1)-block RHS using the current local solution */ 478 PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1])); 479 PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1])); 480 } 481 } 482 } 483 /* add the local solution to the global solution including the ghost nodes */ 484 PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 485 PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 486 PetscFunctionReturn(PETSC_SUCCESS); 487 } 488 489 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y) 490 { 491 PC_ASM *osm = (PC_ASM *)pc->data; 492 Mat Z, W; 493 Vec x; 494 PetscInt i, m, N; 495 ScatterMode 
forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE; 496 497 PetscFunctionBegin; 498 PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented"); 499 /* 500 support for limiting the restriction or interpolation to only local 501 subdomain values (leaving the other values 0). 502 */ 503 if (!(osm->type & PC_ASM_RESTRICT)) { 504 forward = SCATTER_FORWARD_LOCAL; 505 /* have to zero the work RHS since scatter may leave some slots empty */ 506 PetscCall(VecSet(osm->lx, 0.0)); 507 } 508 if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL; 509 PetscCall(VecGetLocalSize(osm->x[0], &m)); 510 PetscCall(MatGetSize(X, NULL, &N)); 511 PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z)); 512 513 PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]); 514 /* zero the global and the local solutions */ 515 PetscCall(MatZeroEntries(Y)); 516 PetscCall(VecSet(osm->ly, 0.0)); 517 518 for (i = 0; i < N; ++i) { 519 PetscCall(MatDenseGetColumnVecRead(X, i, &x)); 520 /* copy the global RHS to local RHS including the ghost nodes */ 521 PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 522 PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 523 PetscCall(MatDenseRestoreColumnVecRead(X, i, &x)); 524 525 PetscCall(MatDenseGetColumnVecWrite(Z, i, &x)); 526 /* restrict local RHS to the overlapping 0-block RHS */ 527 PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward)); 528 PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward)); 529 PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x)); 530 } 531 PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W)); 532 /* solve the overlapping 0-block */ 533 PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0)); 534 PetscCall(KSPMatSolve(osm->ksp[0], Z, W)); 535 PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL)); 536 PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0)); 537 PetscCall(MatDestroy(&Z)); 538 539 for (i = 0; i < N; ++i) { 540 PetscCall(VecSet(osm->ly, 0.0)); 541 PetscCall(MatDenseGetColumnVecRead(W, i, &x)); 542 if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */ 543 PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward)); 544 PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward)); 545 } else { /* interpolate the overlapping 0-block solution to the local solution */ 546 PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse)); 547 PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse)); 548 } 549 PetscCall(MatDenseRestoreColumnVecRead(W, i, &x)); 550 551 PetscCall(MatDenseGetColumnVecWrite(Y, i, &x)); 552 /* add the local solution to the global solution including the ghost nodes */ 553 PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse)); 554 PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse)); 555 PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x)); 556 } 557 PetscCall(MatDestroy(&W)); 558 PetscFunctionReturn(PETSC_SUCCESS); 559 } 560 561 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y) 562 { 563 PC_ASM *osm = (PC_ASM *)pc->data; 564 
PetscInt i, n_local_true = osm->n_local_true; 565 ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE; 566 567 PetscFunctionBegin; 568 /* 569 Support for limiting the restriction or interpolation to only local 570 subdomain values (leaving the other values 0). 571 572 Note: these are reversed from the PCApply_ASM() because we are applying the 573 transpose of the three terms 574 */ 575 576 if (!(osm->type & PC_ASM_INTERPOLATE)) { 577 forward = SCATTER_FORWARD_LOCAL; 578 /* have to zero the work RHS since scatter may leave some slots empty */ 579 PetscCall(VecSet(osm->lx, 0.0)); 580 } 581 if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL; 582 583 /* zero the global and the local solutions */ 584 PetscCall(VecSet(y, 0.0)); 585 PetscCall(VecSet(osm->ly, 0.0)); 586 587 /* Copy the global RHS to local RHS including the ghost nodes */ 588 PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 589 PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward)); 590 591 /* Restrict local RHS to the overlapping 0-block RHS */ 592 PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 593 PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward)); 594 595 /* do the local solves */ 596 for (i = 0; i < n_local_true; ++i) { 597 /* solve the overlapping i-block */ 598 PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 599 PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i])); 600 PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i])); 601 PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0)); 602 603 if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */ 604 PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 605 PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward)); 606 } else { /* interpolate the overlapping i-block solution to the local solution */ 607 PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 608 PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse)); 609 } 610 611 if (i < n_local_true - 1) { 612 /* Restrict local RHS to the overlapping (i+1)-block RHS */ 613 PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 614 PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward)); 615 } 616 } 617 /* Add the local solution to the global solution including the ghost nodes */ 618 PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 619 PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse)); 620 PetscFunctionReturn(PETSC_SUCCESS); 621 } 622 623 static PetscErrorCode PCReset_ASM(PC pc) 624 { 625 PC_ASM *osm = (PC_ASM *)pc->data; 626 PetscInt i; 627 628 PetscFunctionBegin; 629 if (osm->ksp) { 630 for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i])); 631 } 632 if (osm->pmat) { 633 if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat)); 634 } 635 if (osm->lrestriction) { 636 PetscCall(VecScatterDestroy(&osm->restriction)); 637 for (i = 0; i < osm->n_local_true; i++) { 638 PetscCall(VecScatterDestroy(&osm->lrestriction[i])); 639 if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i])); 640 
PetscCall(VecDestroy(&osm->x[i])); 641 PetscCall(VecDestroy(&osm->y[i])); 642 } 643 PetscCall(PetscFree(osm->lrestriction)); 644 if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation)); 645 PetscCall(PetscFree(osm->x)); 646 PetscCall(PetscFree(osm->y)); 647 } 648 PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local)); 649 PetscCall(ISDestroy(&osm->lis)); 650 PetscCall(VecDestroy(&osm->lx)); 651 PetscCall(VecDestroy(&osm->ly)); 652 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats)); 653 654 PetscCall(PetscFree(osm->sub_mat_type)); 655 656 osm->is = NULL; 657 osm->is_local = NULL; 658 PetscFunctionReturn(PETSC_SUCCESS); 659 } 660 661 static PetscErrorCode PCDestroy_ASM(PC pc) 662 { 663 PC_ASM *osm = (PC_ASM *)pc->data; 664 PetscInt i; 665 666 PetscFunctionBegin; 667 PetscCall(PCReset_ASM(pc)); 668 if (osm->ksp) { 669 for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i])); 670 PetscCall(PetscFree(osm->ksp)); 671 } 672 PetscCall(PetscFree(pc->data)); 673 674 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL)); 675 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL)); 676 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL)); 677 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL)); 678 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL)); 679 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL)); 680 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL)); 681 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL)); 682 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL)); 683 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL)); 684 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL)); 685 PetscFunctionReturn(PETSC_SUCCESS); 686 } 687 688 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject) 689 { 690 PC_ASM *osm = (PC_ASM *)pc->data; 691 PetscInt blocks, ovl; 692 PetscBool flg; 693 PCASMType asmtype; 694 PCCompositeType loctype; 695 char sub_mat_type[256]; 696 697 PetscFunctionBegin; 698 PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options"); 699 PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg)); 700 PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg)); 701 if (flg) { 702 PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL)); 703 osm->dm_subdomains = PETSC_FALSE; 704 } 705 PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg)); 706 if (flg) { 707 PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL)); 708 osm->dm_subdomains = PETSC_FALSE; 709 } 710 PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg)); 711 if (flg) { 712 PetscCall(PCASMSetOverlap(pc, ovl)); 713 osm->dm_subdomains = PETSC_FALSE; 714 } 715 flg = PETSC_FALSE; 716 PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, 
(PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg)); 717 if (flg) PetscCall(PCASMSetType(pc, asmtype)); 718 flg = PETSC_FALSE; 719 PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg)); 720 if (flg) PetscCall(PCASMSetLocalType(pc, loctype)); 721 PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg)); 722 if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type)); 723 PetscOptionsHeadEnd(); 724 PetscFunctionReturn(PETSC_SUCCESS); 725 } 726 727 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[]) 728 { 729 PC_ASM *osm = (PC_ASM *)pc->data; 730 PetscInt i; 731 732 PetscFunctionBegin; 733 PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n); 734 PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp()."); 735 736 if (!pc->setupcalled) { 737 if (is) { 738 for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i])); 739 } 740 if (is_local) { 741 for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i])); 742 } 743 PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local)); 744 745 osm->n_local_true = n; 746 osm->is = NULL; 747 osm->is_local = NULL; 748 if (is) { 749 PetscCall(PetscMalloc1(n, &osm->is)); 750 for (i = 0; i < n; i++) osm->is[i] = is[i]; 751 /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. 
*/ 752 osm->overlap = -1; 753 } 754 if (is_local) { 755 PetscCall(PetscMalloc1(n, &osm->is_local)); 756 for (i = 0; i < n; i++) osm->is_local[i] = is_local[i]; 757 if (!is) { 758 PetscCall(PetscMalloc1(osm->n_local_true, &osm->is)); 759 for (i = 0; i < osm->n_local_true; i++) { 760 if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */ 761 PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i])); 762 PetscCall(ISCopy(osm->is_local[i], osm->is[i])); 763 } else { 764 PetscCall(PetscObjectReference((PetscObject)osm->is_local[i])); 765 osm->is[i] = osm->is_local[i]; 766 } 767 } 768 } 769 } 770 } 771 PetscFunctionReturn(PETSC_SUCCESS); 772 } 773 774 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local) 775 { 776 PC_ASM *osm = (PC_ASM *)pc->data; 777 PetscMPIInt rank, size; 778 PetscInt n; 779 780 PetscFunctionBegin; 781 PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N); 782 PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet."); 783 784 /* 785 Split the subdomains equally among all processors 786 */ 787 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank)); 788 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 789 n = N / size + ((N % size) > rank); 790 PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N); 791 PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp()."); 792 if (!pc->setupcalled) { 793 PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local)); 794 795 osm->n_local_true = n; 796 osm->is = NULL; 797 osm->is_local = NULL; 798 } 799 PetscFunctionReturn(PETSC_SUCCESS); 800 } 801 802 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl) 803 { 804 PC_ASM *osm = (PC_ASM *)pc->data; 805 806 PetscFunctionBegin; 807 PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested"); 808 PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp()."); 809 if (!pc->setupcalled) osm->overlap = ovl; 810 PetscFunctionReturn(PETSC_SUCCESS); 811 } 812 813 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type) 814 { 815 PC_ASM *osm = (PC_ASM *)pc->data; 816 817 PetscFunctionBegin; 818 osm->type = type; 819 osm->type_set = PETSC_TRUE; 820 PetscFunctionReturn(PETSC_SUCCESS); 821 } 822 823 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type) 824 { 825 PC_ASM *osm = (PC_ASM *)pc->data; 826 827 PetscFunctionBegin; 828 *type = osm->type; 829 PetscFunctionReturn(PETSC_SUCCESS); 830 } 831 832 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type) 833 { 834 PC_ASM *osm = (PC_ASM *)pc->data; 835 836 PetscFunctionBegin; 837 PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type"); 838 osm->loctype = type; 839 PetscFunctionReturn(PETSC_SUCCESS); 840 } 841 842 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type) 843 { 844 PC_ASM 
*osm = (PC_ASM *)pc->data; 845 846 PetscFunctionBegin; 847 *type = osm->loctype; 848 PetscFunctionReturn(PETSC_SUCCESS); 849 } 850 851 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort) 852 { 853 PC_ASM *osm = (PC_ASM *)pc->data; 854 855 PetscFunctionBegin; 856 osm->sort_indices = doSort; 857 PetscFunctionReturn(PETSC_SUCCESS); 858 } 859 860 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp) 861 { 862 PC_ASM *osm = (PC_ASM *)pc->data; 863 864 PetscFunctionBegin; 865 PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here"); 866 867 if (n_local) *n_local = osm->n_local_true; 868 if (first_local) { 869 PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc))); 870 *first_local -= osm->n_local_true; 871 } 872 if (ksp) *ksp = osm->ksp; 873 PetscFunctionReturn(PETSC_SUCCESS); 874 } 875 876 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type) 877 { 878 PC_ASM *osm = (PC_ASM *)pc->data; 879 880 PetscFunctionBegin; 881 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 882 PetscValidPointer(sub_mat_type, 2); 883 *sub_mat_type = osm->sub_mat_type; 884 PetscFunctionReturn(PETSC_SUCCESS); 885 } 886 887 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type) 888 { 889 PC_ASM *osm = (PC_ASM *)pc->data; 890 891 PetscFunctionBegin; 892 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 893 PetscCall(PetscFree(osm->sub_mat_type)); 894 PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type)); 895 PetscFunctionReturn(PETSC_SUCCESS); 896 } 897 898 /*@C 899 PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`. 900 901 Collective 902 903 Input Parameters: 904 + pc - the preconditioner context 905 . n - the number of subdomains for this processor (default value = 1) 906 . is - the index set that defines the subdomains for this processor 907 (or `NULL` for PETSc to determine subdomains) 908 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT 909 (or `NULL` to not provide these) 910 911 Options Database Key: 912 . -pc_asm_local_blocks <blks> - Sets number of local blocks 913 914 Level: advanced 915 916 Notes: 917 The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local 918 919 By default the `PCASM` preconditioner uses 1 block per processor. 920 921 Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors. 922 923 If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated 924 back to form the global solution (this is the standard restricted additive Schwarz method) 925 926 If the is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is 927 no code to handle that case. 
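 Example Usage:
 A minimal sketch (not taken from this file; the counts n0, n1 and the global index arrays idx0, idx1 are assumed to already exist) that supplies two overlapping subdomains on the calling process and lets `PCASM` use them exactly as given, without adding further overlap:
.vb
  IS is[2];
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n0, idx0, PETSC_COPY_VALUES, &is[0])); /* idx0, idx1 hold global row indices */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n1, idx1, PETSC_COPY_VALUES, &is[1]));
  PetscCall(PCSetType(pc, PCASM));
  PetscCall(PCASMSetLocalSubdomains(pc, 2, is, NULL));
  PetscCall(ISDestroy(&is[0])); /* PCASM keeps its own reference to each IS */
  PetscCall(ISDestroy(&is[1]));
.ve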
928 929 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`, 930 `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM` 931 @*/ 932 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[]) 933 { 934 PetscFunctionBegin; 935 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 936 PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local)); 937 PetscFunctionReturn(PETSC_SUCCESS); 938 } 939 940 /*@C 941 PCASMSetTotalSubdomains - Sets the subdomains for all processors for the 942 additive Schwarz preconditioner, `PCASM`. 943 944 Collective, all MPI ranks must pass in the same array of `IS` 945 946 Input Parameters: 947 + pc - the preconditioner context 948 . N - the number of subdomains for all processors 949 . is - the index sets that define the subdomains for all processors 950 (or `NULL` to ask PETSc to determine the subdomains) 951 - is_local - the index sets that define the local part of the subdomains for this processor 952 (or `NULL` to not provide this information) 953 954 Options Database Key: 955 . -pc_asm_blocks <blks> - Sets total blocks 956 957 Level: advanced 958 959 Notes: 960 Currently you cannot use this to set the actual subdomains with the argument is or is_local. 961 962 By default the `PCASM` preconditioner uses 1 block per processor. 963 964 These index sets cannot be destroyed until after completion of the 965 linear solves for which the `PCASM` preconditioner is being used. 966 967 Use `PCASMSetLocalSubdomains()` to set local subdomains. 968 969 The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local 970 971 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`, 972 `PCASMCreateSubdomains2D()`, `PCGASM` 973 @*/ 974 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[]) 975 { 976 PetscFunctionBegin; 977 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 978 PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local)); 979 PetscFunctionReturn(PETSC_SUCCESS); 980 } 981 982 /*@ 983 PCASMSetOverlap - Sets the overlap between a pair of subdomains for the 984 additive Schwarz preconditioner, `PCASM`. 985 986 Logically Collective 987 988 Input Parameters: 989 + pc - the preconditioner context 990 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1) 991 992 Options Database Key: 993 . -pc_asm_overlap <ovl> - Sets overlap 994 995 Level: intermediate 996 997 Notes: 998 By default the `PCASM` preconditioner uses 1 block per processor. To use 999 multiple blocks per processor, see `PCASMSetTotalSubdomains()` and 1000 `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>). 1001 1002 The overlap defaults to 1, so if one desires that no additional 1003 overlap be computed beyond what may have been set with a call to 1004 `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl 1005 must be set to 0. In particular, if one does not explicitly set 1006 the subdomains in an application code, then all overlap would be computed 1007 internally by PETSc, and using an overlap of 0 would result in a `PCASM` 1008 variant that is equivalent to the block Jacobi preconditioner. 1009 1010 The default algorithm used by PETSc to increase overlap is fast but not scalable; 1011 use the option -mat_increase_overlap_scalable when the problem and the number of processes are large. 
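 As a small illustration (assuming a `KSP` named ksp whose operators have already been set; this sketch is not part of this file), the following requests two layers of overlap, which is equivalent to the command line option -pc_asm_overlap 2:
.vb
  PC pc;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCASM));
  PetscCall(PCASMSetOverlap(pc, 2));
.ve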
1012 1013 Note that one can define initial index sets with any overlap via 1014 `PCASMSetLocalSubdomains()`; the routine 1015 `PCASMSetOverlap()` merely allows PETSc to extend that overlap further 1016 if desired. 1017 1018 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`, 1019 `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM` 1020 @*/ 1021 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl) 1022 { 1023 PetscFunctionBegin; 1024 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1025 PetscValidLogicalCollectiveInt(pc, ovl, 2); 1026 PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl)); 1027 PetscFunctionReturn(PETSC_SUCCESS); 1028 } 1029 1030 /*@ 1031 PCASMSetType - Sets the type of restriction and interpolation used 1032 for local problems in the additive Schwarz method, `PCASM`. 1033 1034 Logically Collective 1035 1036 Input Parameters: 1037 + pc - the preconditioner context 1038 - type - variant of `PCASM`, one of 1039 .vb 1040 PC_ASM_BASIC - full interpolation and restriction 1041 PC_ASM_RESTRICT - full restriction, local processor interpolation (default) 1042 PC_ASM_INTERPOLATE - full interpolation, local processor restriction 1043 PC_ASM_NONE - local processor restriction and interpolation 1044 .ve 1045 1046 Options Database Key: 1047 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType` 1048 1049 Level: intermediate 1050 1051 Note: 1052 if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected 1053 to limit the local processor interpolation 1054 1055 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, 1056 `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM` 1057 @*/ 1058 PetscErrorCode PCASMSetType(PC pc, PCASMType type) 1059 { 1060 PetscFunctionBegin; 1061 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1062 PetscValidLogicalCollectiveEnum(pc, type, 2); 1063 PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type)); 1064 PetscFunctionReturn(PETSC_SUCCESS); 1065 } 1066 1067 /*@ 1068 PCASMGetType - Gets the type of restriction and interpolation used 1069 for local problems in the additive Schwarz method, `PCASM`. 1070 1071 Logically Collective 1072 1073 Input Parameter: 1074 . pc - the preconditioner context 1075 1076 Output Parameter: 1077 . type - variant of `PCASM`, one of 1078 1079 .vb 1080 PC_ASM_BASIC - full interpolation and restriction 1081 PC_ASM_RESTRICT - full restriction, local processor interpolation 1082 PC_ASM_INTERPOLATE - full interpolation, local processor restriction 1083 PC_ASM_NONE - local processor restriction and interpolation 1084 .ve 1085 1086 Options Database Key: 1087 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type 1088 1089 Level: intermediate 1090 1091 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`, 1092 `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()` 1093 @*/ 1094 PetscErrorCode PCASMGetType(PC pc, PCASMType *type) 1095 { 1096 PetscFunctionBegin; 1097 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1098 PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type)); 1099 PetscFunctionReturn(PETSC_SUCCESS); 1100 } 1101 1102 /*@ 1103 PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`. 
1104 1105 Logically Collective 1106 1107 Input Parameters: 1108 + pc - the preconditioner context 1109 - type - type of composition, one of 1110 .vb 1111 PC_COMPOSITE_ADDITIVE - local additive combination 1112 PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination 1113 .ve 1114 1115 Options Database Key: 1116 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type 1117 1118 Level: intermediate 1119 1120 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASM`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType` 1121 @*/ 1122 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type) 1123 { 1124 PetscFunctionBegin; 1125 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1126 PetscValidLogicalCollectiveEnum(pc, type, 2); 1127 PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type)); 1128 PetscFunctionReturn(PETSC_SUCCESS); 1129 } 1130 1131 /*@ 1132 PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`. 1133 1134 Logically Collective 1135 1136 Input Parameter: 1137 . pc - the preconditioner context 1138 1139 Output Parameter: 1140 . type - type of composition, one of 1141 .vb 1142 PC_COMPOSITE_ADDITIVE - local additive combination 1143 PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination 1144 .ve 1145 1146 Options Database Key: 1147 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type 1148 1149 Level: intermediate 1150 1151 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType` 1152 @*/ 1153 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type) 1154 { 1155 PetscFunctionBegin; 1156 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1157 PetscValidPointer(type, 2); 1158 PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type)); 1159 PetscFunctionReturn(PETSC_SUCCESS); 1160 } 1161 1162 /*@ 1163 PCASMSetSortIndices - Determines whether subdomain indices are sorted. 1164 1165 Logically Collective 1166 1167 Input Parameters: 1168 + pc - the preconditioner context 1169 - doSort - sort the subdomain indices 1170 1171 Level: intermediate 1172 1173 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, 1174 `PCASMCreateSubdomains2D()` 1175 @*/ 1176 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort) 1177 { 1178 PetscFunctionBegin; 1179 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1180 PetscValidLogicalCollectiveBool(pc, doSort, 2); 1181 PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort)); 1182 PetscFunctionReturn(PETSC_SUCCESS); 1183 } 1184 1185 /*@C 1186 PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on 1187 this processor. 1188 1189 Collective iff first_local is requested 1190 1191 Input Parameter: 1192 . pc - the preconditioner context 1193 1194 Output Parameters: 1195 + n_local - the number of blocks on this processor or NULL 1196 . first_local - the global number of the first block on this processor or NULL, 1197 all processors must request or all must pass NULL 1198 - ksp - the array of `KSP` contexts 1199 1200 Level: advanced 1201 1202 Notes: 1203 After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed. 1204 1205 You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`. 
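 As an illustrative sketch (the outer ksp and its `PCASM` preconditioner pc are assumed to be created and configured elsewhere), the local solvers can be switched to a direct factorization after setup:
.vb
  PetscInt n_local;
  KSP     *subksp;
  PC       subpc;
  PetscCall(KSPSetUp(ksp));
  PetscCall(PCASMGetSubKSP(pc, &n_local, NULL, &subksp));
  for (PetscInt i = 0; i < n_local; i++) {
    PetscCall(KSPGetPC(subksp[i], &subpc));
    PetscCall(PCSetType(subpc, PCLU)); /* use an LU factorization on every local block */
  }
.ve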
1206 1207 Fortran Note: 1208 The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length. 1209 1210 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, 1211 `PCASMCreateSubdomains2D()` 1212 @*/ 1213 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[]) 1214 { 1215 PetscFunctionBegin; 1216 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1217 PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp)); 1218 PetscFunctionReturn(PETSC_SUCCESS); 1219 } 1220 1221 /*MC 1222 PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with 1223 its own `KSP` object. 1224 1225 Options Database Keys: 1226 + -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank. 1227 . -pc_asm_overlap <ovl> - Sets overlap 1228 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()` 1229 - -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()` 1230 1231 Level: beginner 1232 1233 Notes: 1234 If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you 1235 will get different convergence rates due to the default option of -pc_asm_type restrict. Use 1236 -pc_asm_type basic to get the same convergence behavior. 1237 1238 Each processor can have one or more blocks, but a block cannot be shared by more 1239 than one processor. Use `PCGASM` for subdomains shared by multiple processes. 1240 1241 To set options on the solvers for each block, append -sub_ to all the `KSP` and `PC` 1242 options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly 1243 1244 To set the options on the solvers separately for each block, call `PCASMGetSubKSP()` 1245 and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`). 1246 1247 References: 1248 + * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions, 1249 Courant Institute, New York University Technical report 1250 - * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations, 1251 Cambridge University Press. 
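 Example Usage:
 An illustrative command line (the executable ./ex1 and its problem setup are assumptions, not part of this file) that runs GMRES with restricted additive Schwarz, two layers of overlap, and ILU(1) on each block:
.vb
  mpiexec -n 4 ./ex1 -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve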
1252 1253 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`, 1254 `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()` 1255 `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType` 1256 M*/ 1257 1258 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc) 1259 { 1260 PC_ASM *osm; 1261 1262 PetscFunctionBegin; 1263 PetscCall(PetscNew(&osm)); 1264 1265 osm->n = PETSC_DECIDE; 1266 osm->n_local = 0; 1267 osm->n_local_true = PETSC_DECIDE; 1268 osm->overlap = 1; 1269 osm->ksp = NULL; 1270 osm->restriction = NULL; 1271 osm->lprolongation = NULL; 1272 osm->lrestriction = NULL; 1273 osm->x = NULL; 1274 osm->y = NULL; 1275 osm->is = NULL; 1276 osm->is_local = NULL; 1277 osm->mat = NULL; 1278 osm->pmat = NULL; 1279 osm->type = PC_ASM_RESTRICT; 1280 osm->loctype = PC_COMPOSITE_ADDITIVE; 1281 osm->sort_indices = PETSC_TRUE; 1282 osm->dm_subdomains = PETSC_FALSE; 1283 osm->sub_mat_type = NULL; 1284 1285 pc->data = (void *)osm; 1286 pc->ops->apply = PCApply_ASM; 1287 pc->ops->matapply = PCMatApply_ASM; 1288 pc->ops->applytranspose = PCApplyTranspose_ASM; 1289 pc->ops->setup = PCSetUp_ASM; 1290 pc->ops->reset = PCReset_ASM; 1291 pc->ops->destroy = PCDestroy_ASM; 1292 pc->ops->setfromoptions = PCSetFromOptions_ASM; 1293 pc->ops->setuponblocks = PCSetUpOnBlocks_ASM; 1294 pc->ops->view = PCView_ASM; 1295 pc->ops->applyrichardson = NULL; 1296 1297 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM)); 1298 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM)); 1299 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM)); 1300 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM)); 1301 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM)); 1302 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM)); 1303 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM)); 1304 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM)); 1305 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM)); 1306 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM)); 1307 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM)); 1308 PetscFunctionReturn(PETSC_SUCCESS); 1309 } 1310 1311 /*@C 1312 PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz 1313 preconditioner, `PCASM`, for any problem on a general grid. 1314 1315 Collective 1316 1317 Input Parameters: 1318 + A - The global matrix operator 1319 - n - the number of local blocks 1320 1321 Output Parameters: 1322 . outis - the array of index sets defining the subdomains 1323 1324 Level: advanced 1325 1326 Note: 1327 This generates nonoverlapping subdomains; the `PCASM` will generate the overlap 1328 from these if you use `PCASMSetLocalSubdomains()` 1329 1330 Fortran Note: 1331 You must provide the array outis[] already allocated of length n. 
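 Example Usage:
 A sketch (the matrix A and the `PC` pc are assumed to exist; error handling is omitted) that creates four nonoverlapping local blocks, hands them to `PCASM`, and lets the preconditioner grow one layer of overlap itself:
.vb
  IS *is;
  PetscCall(PCASMCreateSubdomains(A, 4, &is));
  PetscCall(PCASMSetLocalSubdomains(pc, 4, is, NULL));
  PetscCall(PCASMSetOverlap(pc, 1));
  PetscCall(PCASMDestroySubdomains(4, is, NULL)); /* safe here: PCASM keeps its own references */
.ve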

.seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
@*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand.
           Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs;
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb, &numbering));
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   PCASMDestroySubdomains - Destroys the index sets created with
   `PCASMCreateSubdomains()`. Should be called after setting the subdomains with `PCASMSetLocalSubdomains()`.

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be `NULL`

   Level: advanced

.seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
@*/
PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt i;

  PetscFunctionBegin;
  if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
  if (is) {
    PetscValidPointer(is, 2);
    for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
    PetscCall(PetscFree(is));
  }
  if (is_local) {
    PetscValidPointer(is_local, 3);
    for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
    PetscCall(PetscFree(is_local));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m - the number of mesh points in the x direction
.  n - the number of mesh points in the y direction
.  M - the number of subdomains in the x direction
.  N - the number of subdomains in the y direction
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Level: advanced

   Note:
   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
   preconditioners. More general related routines are
   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
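
   Example Usage:
   An illustrative sketch (the values are chosen arbitrarily and `pc` is assumed to be a sequential `PCASM`) decomposing a
   12 x 12 grid with one degree of freedom per node into 3 subdomains in x and 2 in y, with one mesh line of overlap:
.vb
   PetscInt Nsub;
   IS      *is, *is_local;

   PetscCall(PCASMCreateSubdomains2D(12, 12, 3, 2, 1, 1, &Nsub, &is, &is_local));
   PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
.ve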

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMSetOverlap()`
@*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0;
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
   only) for the additive Schwarz preconditioner, `PCASM`.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n - if requested, the number of subdomains for this processor (default value = 1)
.  is - if requested, the index sets that define the subdomains for this processor
-  is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)

   Level: advanced

   Note:
   The `IS` numbering is in the parallel, global numbering of the vector.
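
   Example Usage:
   A small sketch (it assumes `pc` is a `PCASM` that has already been set up) that reports the size of every subdomain on
   this rank:
.vb
   PetscInt nsub, i, len;
   IS      *subis;

   PetscCall(PCASMGetLocalSubdomains(pc, &nsub, &subis, NULL));
   for (i = 0; i < nsub; i++) {
     PetscCall(ISGetLocalSize(subis[i], &len));
     PetscCall(PetscPrintf(PETSC_COMM_SELF, "subdomain %" PetscInt_FMT " has %" PetscInt_FMT " indices\n", i, len));
   }
.ve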

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
@*/
PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
{
  PC_ASM   *osm = (PC_ASM *)pc->data;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  if (is) PetscValidPointer(is, 3);
  if (is_local) PetscValidPointer(is_local, 4);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
  if (n) *n = osm->n_local_true;
  if (is) *is = osm->is;
  if (is_local) *is_local = osm->is_local;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
   only) for the additive Schwarz preconditioner, `PCASM`.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n - if requested, the number of matrices for this processor (default value = 1)
-  mat - if requested, the matrices

   Level: advanced

   Notes:
   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`

   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
@*/
PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
{
  PC_ASM   *osm;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (n) PetscValidIntPointer(n, 2);
  if (mat) PetscValidPointer(mat, 3);
  PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  if (!match) {
    if (n) *n = 0;
    if (mat) *mat = NULL;
  } else {
    osm = (PC_ASM *)pc->data;
    if (n) *n = osm->n_local_true;
    if (mat) *mat = osm->pmat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

   Logically Collective

   Input Parameters:
+  pc - the preconditioner
-  flg - boolean indicating whether to use subdomains defined by the `DM`

   Options Database Key:
.  -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`

   Level: intermediate

   Note:
   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
   so setting either of the first two effectively turns the latter off.
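
   Example Usage:
   A brief sketch (it assumes the `PC` was obtained from a `KSP` that has a `DM` attached, for example via `KSPSetDM()`)
   enabling `DM`-defined subdomains; it must be called before the preconditioner is set up:
.vb
   PetscCall(PCSetType(pc, PCASM));
   PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE));
.ve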

.seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
@*/
PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
{
  PC_ASM   *osm = (PC_ASM *)pc->data;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  if (match) osm->dm_subdomains = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCASMGetDMSubdomains - Returns a flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

   Not Collective

   Input Parameter:
.  pc - the preconditioner

   Output Parameter:
.  flg - boolean indicating whether to use subdomains defined by the `DM`

   Level: intermediate

.seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
@*/
PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
{
  PC_ASM   *osm = (PC_ASM *)pc->data;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
  if (match) *flg = osm->dm_subdomains;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.

   Not Collective

   Input Parameter:
.  pc - the `PC`

   Output Parameter:
.  sub_mat_type - name of the matrix type

   Level: advanced

.seealso: `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
@*/
PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCASMSetSubMatType - Sets the type of matrix used for `PCASM` subsolves

   Collective

   Input Parameters:
+  pc - the `PC` object
-  sub_mat_type - the `MatType`

   Options Database Key:
.  -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
   If you specify a base name like aijviennacl, the corresponding sequential type is assumed.

   Level: advanced

   Note:
   See "${PETSC_DIR}/include/petscmat.h" for available types

.seealso: `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
@*/
PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
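
/*
   Example Usage (an illustrative sketch, not part of the library source): selecting a GPU-capable matrix type for the
   subdomain solves of an existing `PCASM` named `pc`. The type chosen below is only one possibility; any sequential
   `MatType` may be used.

     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetSubMatType(pc, MATSEQAIJCUSPARSE));

   The same effect is available from the command line with -pc_asm_sub_mat_type seqaijcusparse.
*/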