xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 4dbf25a8fa98e38799e7b47dcb2d8a9309975f41)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include <petsc/private/matimpl.h>
15 
/*
  PCView_ASM - Views a PC_ASM preconditioner.

  For an ASCII viewer it prints a summary (block count, overlap, restriction
  type) plus either the first block's KSP (default format, rank 0 only) or
  every local block's KSP (PETSC_VIEWER_ASCII_INFO_DETAIL). For a string
  viewer it emits a one-line summary plus the first block's KSP.

  Collective: the sub-viewer and synchronized-print calls below must be made
  by all ranks of the PC's communicator in the same order.
*/
static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
{
  PC_ASM           *osm = (PC_ASM *)pc->data;
  PetscMPIInt       rank;
  PetscInt          i, bsz;
  PetscBool         iascii, isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char       *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    /* Fallback texts used when overlap/block counts have not been determined yet */
    char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* Default format: show only the first block's solver, on rank 0 */
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
        /* Get/Restore of the sub-viewer is collective even though only rank 0 prints */
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(sviewer));
          PetscCall(KSPView(osm->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(sviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      }
    } else {
      /* Detailed format: every rank prints each of its local blocks */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", rank, osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i], &bsz));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", rank, i, bsz));
        PetscCall(KSPView(osm->ksp[i], sviewer));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
/*
  PCSetUp_ASM - Sets up the additive Schwarz preconditioner.

  On first setup it determines the subdomains (from the DM, from user-supplied
  index sets, or one per process), optionally increases their overlap, creates
  one sequential KSP per local subdomain, and builds the restriction scatters.
  On subsequent setups it reuses or rebuilds the extracted submatrices depending
  on whether the nonzero pattern of pc->pmat changed, then refreshes the
  operators of the block solvers.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM       *osm = (PC_ASM *)pc->data;
  PetscBool     flg;
  PetscInt      i, m, m_local;
  MatReuse      scall = MAT_REUSE_MATRIX;
  IS            isl;
  KSP           ksp;
  PC            subpc;
  const char   *prefix, *pprefix;
  Vec           vec;
  DM           *domain_dm = NULL;
  MatNullSpace *nullsp    = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    /* NOTE(review): this inner m shadows the outer m declared above; the value
       fetched into it near the end of this scope is never read afterwards
       (the outer m is re-fetched later) — harmless but confusing. */
    PetscInt m;

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt num_domains, d;
        char   **domain_names;
        IS      *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        /* PCASMSetLocalSubdomains() references the index sets, so release the DM's copies */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names) PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {
        PetscInt max, sum;
      } inwork, outwork;
      PetscMPIInt size;

      inwork.max = osm->n_local_true;
      inwork.sum = osm->n_local_true;
      /* single reduction computing both the max and the sum over all ranks */
      PetscCallMPI(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* Preserve a non-overlapping copy of each subdomain before overlap is added */
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
      for (i = 0; i < osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
        } else {
          /* No overlap will be added, so the local IS can share the same object */
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
    }
    if (osm->sort_indices) {
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
      if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
        PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
        PetscCall(KSPSetType(ksp, KSPPREONLY));
        PetscCall(KSPGetPC(ksp, &subpc));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        /* sub-solvers are configured with the "<prefix>sub_" options prefix */
        PetscCall(KSPSetOptionsPrefix(ksp, prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
        if (domain_dm) {
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) PetscCall(PetscFree(domain_dm));
    }

    /* lis = sorted, duplicate-free union of all local subdomains */
    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* save the attached null spaces so they can be restored on the new submatrices */
      PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
      PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
    PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
    for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
    /* re-attach the null spaces saved before the old submatrices were destroyed */
    if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));

    PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
    /* lprolongation is only needed when adding back solely the non-overlapping parts */
    if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));

    PetscCall(ISGetLocalSize(osm->lis, &m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
    PetscCall(MatGetVecType(osm->pmat[0], &vtype));
    /* lx/ly are sequential work vectors covering the union of all local subdomains */
    PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
    PetscCall(VecSetSizes(osm->lx, m, m));
    PetscCall(VecSetType(osm->lx, vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    /* restriction: global vector -> local union vector lx */
    PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i = 0; i < osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt        *idx_is;
      PetscInt              *idx_lis, nout;

      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
      PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m, &idx_lis));
      /* translate the subdomain's global indices into positions within lis */
      PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
      PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
      PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll, isll_local;
        const PetscInt        *idx_local;
        PetscInt              *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        /* idx1: positions of the non-overlapping entries within the overlapping subdomain is[i] */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
        PetscCall(PetscMalloc1(m_local, &idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));

        /* idx2: positions of the same entries within the union lis */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
        PetscCall(PetscMalloc1(m_local, &idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] = rows is[i], all columns of lis — used to update later blocks' RHS */
    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
  for (i = 0; i < osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
    if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
412 
413 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
414 {
415   PC_ASM            *osm = (PC_ASM *)pc->data;
416   PetscInt           i;
417   KSPConvergedReason reason;
418 
419   PetscFunctionBegin;
420   for (i = 0; i < osm->n_local_true; i++) {
421     PetscCall(KSPSetUp(osm->ksp[i]));
422     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
423     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
424   }
425   PetscFunctionReturn(PETSC_SUCCESS);
426 }
427 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner to x, producing y.

  The global RHS is scattered to the local union vector lx, restricted block by
  block to x[i], solved with the block KSP into y[i], accumulated into the
  local union vector ly, and finally scattered back into the global y. For
  PC_COMPOSITE_MULTIPLICATIVE local composition, each later block's RHS is
  updated with the solution accumulated so far.
*/
static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
        /* update the overlapping (i+1)-block RHS using the current local solution */
        PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
        /* x[i+1] <- x[i+1] - lmats[i+1]*ly (Gauss-Seidel-like sweep over the blocks) */
        PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
      }
    }
  }
  /* add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
492 
/*
  PCMatApply_ASM_Private - Applies the ASM preconditioner (or its transpose)
  to all columns of the dense matrix X at once, producing Y.

  Same pipeline as PCApply_ASM()/PCApplyTranspose_ASM() but column-blocked:
  every column is scattered to the local 0-block, the block solve is done for
  all columns with a single KSPMatSolve()/KSPMatSolveTranspose(), and the
  results are scattered back. Only a single local block is supported (checked
  below). When transpose is true the restrict/interpolate scatter-mode roles
  are swapped relative to the forward apply.
*/
static PetscErrorCode PCMatApply_ASM_Private(PC pc, Mat X, Mat Y, PetscBool transpose)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  Mat         Z, W;
  Vec         x;
  PetscInt    i, m, N;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if ((!transpose && !(osm->type & PC_ASM_RESTRICT)) || (transpose && !(osm->type & PC_ASM_INTERPOLATE))) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if ((!transpose && !(osm->type & PC_ASM_INTERPOLATE)) || (transpose && !(osm->type & PC_ASM_RESTRICT))) reverse = SCATTER_REVERSE_LOCAL;
  /* NOTE(review): osm->x[0] is dereferenced here, so this assumes at least one local block exists — confirm callers guarantee n_local_true >= 1 */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  /* Z holds the restricted RHS for all N columns */
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(MatZeroEntries(Y));
  PetscCall(VecSet(osm->ly, 0.0));

  for (i = 0; i < N; ++i) {
    PetscCall(MatDenseGetColumnVecRead(X, i, &x));
    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
  }
  /* W receives the block solutions for all N columns */
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
  /* solve the overlapping 0-block */
  if (!transpose) {
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
  } else {
    PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolveTranspose(osm->ksp[0], Z, W));
    PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0));
  }
  PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
  PetscCall(MatDestroy(&Z));

  for (i = 0; i < N; ++i) {
    PetscCall(VecSet(osm->ly, 0.0));
    PetscCall(MatDenseGetColumnVecRead(W, i, &x));
    if (osm->lprolongation && ((!transpose && osm->type != PC_ASM_INTERPOLATE) || (transpose && osm->type != PC_ASM_RESTRICT))) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping 0-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
    }
    PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
  }
  PetscCall(MatDestroy(&W));
  PetscFunctionReturn(PETSC_SUCCESS);
}
570 
571 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
572 {
573   PetscFunctionBegin;
574   PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_FALSE));
575   PetscFunctionReturn(PETSC_SUCCESS);
576 }
577 
578 static PetscErrorCode PCMatApplyTranspose_ASM(PC pc, Mat X, Mat Y)
579 {
580   PetscFunctionBegin;
581   PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_TRUE));
582   PetscFunctionReturn(PETSC_SUCCESS);
583 }
584 
/*
  PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
  preconditioner to x, producing y.

  Mirrors PCApply_ASM() with KSPSolveTranspose() for the block solves and with
  the restrict/interpolate roles of the scatter modes swapped (see the note
  below); there is no multiplicative-composition RHS update here.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
646 
647 static PetscErrorCode PCReset_ASM(PC pc)
648 {
649   PC_ASM  *osm = (PC_ASM *)pc->data;
650   PetscInt i;
651 
652   PetscFunctionBegin;
653   if (osm->ksp) {
654     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
655   }
656   if (osm->pmat) {
657     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
658   }
659   if (osm->lrestriction) {
660     PetscCall(VecScatterDestroy(&osm->restriction));
661     for (i = 0; i < osm->n_local_true; i++) {
662       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
663       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
664       PetscCall(VecDestroy(&osm->x[i]));
665       PetscCall(VecDestroy(&osm->y[i]));
666     }
667     PetscCall(PetscFree(osm->lrestriction));
668     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
669     PetscCall(PetscFree(osm->x));
670     PetscCall(PetscFree(osm->y));
671   }
672   PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
673   PetscCall(ISDestroy(&osm->lis));
674   PetscCall(VecDestroy(&osm->lx));
675   PetscCall(VecDestroy(&osm->ly));
676   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
677 
678   PetscCall(PetscFree(osm->sub_mat_type));
679 
680   osm->is       = NULL;
681   osm->is_local = NULL;
682   PetscFunctionReturn(PETSC_SUCCESS);
683 }
684 
685 static PetscErrorCode PCDestroy_ASM(PC pc)
686 {
687   PC_ASM  *osm = (PC_ASM *)pc->data;
688   PetscInt i;
689 
690   PetscFunctionBegin;
691   PetscCall(PCReset_ASM(pc));
692   if (osm->ksp) {
693     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
694     PetscCall(PetscFree(osm->ksp));
695   }
696   PetscCall(PetscFree(pc->data));
697 
698   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
699   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
700   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
701   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
702   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
703   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
704   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
705   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
706   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
707   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
708   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
709   PetscFunctionReturn(PETSC_SUCCESS);
710 }
711 
712 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems PetscOptionsObject)
713 {
714   PC_ASM         *osm = (PC_ASM *)pc->data;
715   PetscInt        blocks, ovl;
716   PetscBool       flg;
717   PCASMType       asmtype;
718   PCCompositeType loctype;
719   char            sub_mat_type[256];
720 
721   PetscFunctionBegin;
722   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
723   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
724   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
725   if (flg) {
726     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
727     osm->dm_subdomains = PETSC_FALSE;
728   }
729   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
730   if (flg) {
731     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
732     osm->dm_subdomains = PETSC_FALSE;
733   }
734   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
735   if (flg) {
736     PetscCall(PCASMSetOverlap(pc, ovl));
737     osm->dm_subdomains = PETSC_FALSE;
738   }
739   flg = PETSC_FALSE;
740   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
741   if (flg) PetscCall(PCASMSetType(pc, asmtype));
742   flg = PETSC_FALSE;
743   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
744   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
745   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
746   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
747   PetscOptionsHeadEnd();
748   PetscFunctionReturn(PETSC_SUCCESS);
749 }
750 
/*
   PCASMSetLocalSubdomains_ASM - Records the caller-supplied subdomain index sets in the
   PC_ASM context.  The IS objects are referenced (not copied), so the caller may free
   its arrays (but not the IS themselves) after this call.  After PCSetUp() only a
   matching no-op call (same n, no is) is permitted.
*/
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the incoming index sets before destroying the current ones, in case the same IS objects appear in both */
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));

    /* the number of blocks changed, so the old per-block solvers are no longer valid */
    if (osm->ksp && osm->n_local_true != n) {
      for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
      PetscCall(PetscFree(osm->ksp));
    }

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      /* only is_local was given: derive osm->is from it */
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            /* no overlap growth: share the IS instead of copying */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
802 
803 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
804 {
805   PC_ASM     *osm = (PC_ASM *)pc->data;
806   PetscMPIInt rank, size;
807   PetscInt    n;
808 
809   PetscFunctionBegin;
810   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
811   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets, they cannot be set globally yet.");
812 
813   /*
814      Split the subdomains equally among all processors
815   */
816   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
817   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
818   n = N / size + ((N % size) > rank);
819   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, rank, size, N);
820   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
821   if (!pc->setupcalled) {
822     PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
823 
824     osm->n_local_true = n;
825     osm->is           = NULL;
826     osm->is_local     = NULL;
827   }
828   PetscFunctionReturn(PETSC_SUCCESS);
829 }
830 
831 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
832 {
833   PC_ASM *osm = (PC_ASM *)pc->data;
834 
835   PetscFunctionBegin;
836   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
837   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
838   if (!pc->setupcalled) osm->overlap = ovl;
839   PetscFunctionReturn(PETSC_SUCCESS);
840 }
841 
/* PCASMSetType_ASM - Stores the restriction/interpolation variant and records that it was set explicitly */
static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE; /* remember the type was chosen explicitly rather than defaulted */
  PetscFunctionReturn(PETSC_SUCCESS);
}
851 
/* PCASMGetType_ASM - Returns the currently selected restriction/interpolation variant */
static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
860 
/* PCASMSetLocalType_ASM - Stores the local solver composition type; only additive and multiplicative composition are implemented */
static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
870 
/* PCASMGetLocalType_ASM - Returns the local solver composition type */
static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
879 
/* PCASMSetSortIndices_ASM - Stores whether the subdomain indices are to be sorted */
static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(PETSC_SUCCESS);
}
888 
/*
   PCASMGetSubKSP_ASM - Hands out the internal array of subdomain KSP contexts.

   Computing first_local involves an MPI_Scan() over the communicator, so either every
   rank must request it or every rank must pass NULL for it.
*/
static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* MPI_Scan() gives an inclusive prefix sum; subtracting our own count turns it into the exclusive sum, i.e. the global number of this rank's first block */
    PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  if (ksp) *ksp = osm->ksp; /* borrowed reference; the array remains owned by the PC */
  PetscFunctionReturn(PETSC_SUCCESS);
}
904 
/* PCASMGetSubMatType_ASM - Returns the matrix type requested for the subdomain matrices (NULL if none was ever set) */
static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(sub_mat_type, 2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
915 
/* PCASMSetSubMatType_ASM - Replaces the stored subdomain matrix type with a freshly allocated copy of the string */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscFree(osm->sub_mat_type)); /* drop any previously stored type string */
  PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
926 
/*@
  PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.

  Collective

  Input Parameters:
+ pc       - the preconditioner context
. n        - the number of subdomains for this processor (default value = 1)
. is       - the index sets that define the subdomains for this processor (or `NULL` for PETSc to determine subdomains)
             the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
- is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
             (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array
             (not the `IS` in the array) after this call

  Options Database Key:
. -pc_asm_local_blocks <blks> - Sets number of local blocks

  Level: advanced

  Notes:
  The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`

  By default the `PCASM` preconditioner uses 1 block per processor.

  Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.

  If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
  back to form the global solution (this is the standard restricted additive Schwarz method, RASM)

  If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
  no code to handle that case.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
969 
/*@
  PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
  additive Schwarz preconditioner, `PCASM`.

  Collective, all MPI ranks must pass in the same array of `IS`

  Input Parameters:
+ pc       - the preconditioner context
. N        - the number of subdomains for all processors
. is       - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains)
             the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
- is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information)
             The values of the `is_local` array are copied so you can free the array (not the `IS` in the array) after this call

  Options Database Key:
. -pc_asm_blocks <blks> - Sets total blocks

  Level: advanced

  Notes:
  Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`.

  By default the `PCASM` preconditioner uses 1 block per processor.

  These index sets cannot be destroyed until after completion of the
  linear solves for which the `PCASM` preconditioner is being used.

  Use `PCASMSetLocalSubdomains()` to set local subdomains.

  The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`

.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCGASM`
@*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1011 
/*@
  PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
  additive Schwarz preconditioner, `PCASM`.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

  Options Database Key:
. -pc_asm_overlap <ovl> - Sets overlap

  Level: intermediate

  Notes:
  By default the `PCASM` preconditioner uses 1 block per processor.  To use
  multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
  `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).

  The overlap defaults to 1, so if one desires that no additional
  overlap be computed beyond what may have been set with a call to
  `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
  must be set to be 0.  In particular, if one does not explicitly set
  the subdomains in an application code, then all overlap would be computed
  internally by PETSc, and using an overlap of 0 would result in a `PCASM`
  variant that is equivalent to the block Jacobi preconditioner.

  The default algorithm used by PETSc to increase overlap is fast, but not scalable,
  use the option -mat_increase_overlap_scalable when the problem and number of processes is large.

  One can define initial index sets with any overlap via
  `PCASMSetLocalSubdomains()`; the routine
  `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
  if desired.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
@*/
PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, ovl, 2);
  PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1059 
/*@
  PCASMSetType - Sets the type of restriction and interpolation used
  for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - variant of `PCASM`, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

  Options Database Key:
. -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`

  Level: intermediate

  Note:
  If the `is_local` arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
  to limit the local processor interpolation

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetType(PC pc, PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1096 
/*@
  PCASMGetType - Gets the type of restriction and interpolation used
  for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - variant of `PCASM`, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

  Options Database Key:
. -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type

  Level: intermediate

  Note:
  The type is selected with `PCASMSetType()` or the option `-pc_asm_type`; the default is `PC_ASM_RESTRICT`

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
@*/
PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1130 
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

  Note:
  Only `PC_COMPOSITE_ADDITIVE` and `PC_COMPOSITE_MULTIPLICATIVE` are supported; any other value generates an error

.seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1159 
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

  Note:
  The composition type is selected with `PCASMSetLocalType()` or the option `-pc_asm_local_type`

.seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1190 
/*@
  PCASMSetSortIndices - Determines whether subdomain indices are sorted.

  Logically Collective

  Input Parameters:
+ pc     - the preconditioner context
- doSort - sort the subdomain indices

  Level: intermediate

  Note:
  By default the subdomain indices are sorted (`doSort` = `PETSC_TRUE`)

.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, doSort, 2);
  PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1213 
/*@C
  PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
  this processor.

  Collective iff first_local is requested

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n_local     - the number of blocks on this processor or `NULL`
. first_local - the global number of the first block on this processor or `NULL`, all processors must request or all must pass `NULL`
- ksp         - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.

  You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1245 
1246 /*MC
1247    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1248            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`
1249 
1250    Options Database Keys:
1251 +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1252 .  -pc_asm_overlap <ovl>                          - Sets overlap
1253 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1254 .  -pc_asm_dm_subdomains <bool>                   - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1255 -  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1256 
1257    Level: beginner
1258 
1259    Notes:
1260    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1261    will get a different convergence rate due to the default option of `-pc_asm_type restrict`. Use
1262    `-pc_asm_type basic` to get the same convergence behavior
1263 
1264    Each processor can have one or more blocks, but a block cannot be shared by more
1265    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1266 
   To set options on the solvers for each block append `-sub_` to all the `KSP` and `PC`
1268    options database keys. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`
1269 
1270    To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
1271    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1272 
1273    If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains
1274 
1275 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1276           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1277           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1278 M*/
1279 
1280 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1281 {
1282   PC_ASM *osm;
1283 
1284   PetscFunctionBegin;
1285   PetscCall(PetscNew(&osm));
1286 
1287   osm->n             = PETSC_DECIDE;
1288   osm->n_local       = 0;
1289   osm->n_local_true  = PETSC_DECIDE;
1290   osm->overlap       = 1;
1291   osm->ksp           = NULL;
1292   osm->restriction   = NULL;
1293   osm->lprolongation = NULL;
1294   osm->lrestriction  = NULL;
1295   osm->x             = NULL;
1296   osm->y             = NULL;
1297   osm->is            = NULL;
1298   osm->is_local      = NULL;
1299   osm->mat           = NULL;
1300   osm->pmat          = NULL;
1301   osm->type          = PC_ASM_RESTRICT;
1302   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1303   osm->sort_indices  = PETSC_TRUE;
1304   osm->dm_subdomains = PETSC_FALSE;
1305   osm->sub_mat_type  = NULL;
1306 
1307   pc->data                   = (void *)osm;
1308   pc->ops->apply             = PCApply_ASM;
1309   pc->ops->matapply          = PCMatApply_ASM;
1310   pc->ops->applytranspose    = PCApplyTranspose_ASM;
1311   pc->ops->matapplytranspose = PCMatApplyTranspose_ASM;
1312   pc->ops->setup             = PCSetUp_ASM;
1313   pc->ops->reset             = PCReset_ASM;
1314   pc->ops->destroy           = PCDestroy_ASM;
1315   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
1316   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
1317   pc->ops->view              = PCView_ASM;
1318   pc->ops->applyrichardson   = NULL;
1319 
1320   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1321   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1322   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1323   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1324   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1325   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1326   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1327   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1328   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1329   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1330   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1331   PetscFunctionReturn(PETSC_SUCCESS);
1332 }
1333 
/*@C
  PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
  preconditioner, `PCASM`,  for any problem on a general grid.

  Collective

  Input Parameters:
+ A - The global matrix operator
- n - the number of local blocks

  Output Parameter:
. outis - the array of index sets defining the subdomains

  Level: advanced

  Note:
  This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
  from these if you use `PCASMSetLocalSubdomains()`

.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
@*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscAssertPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  /* this rank's row range must be aligned to the matrix block size, otherwise blocks would be split across subdomains */
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    /* isbaij records whether the diagonal block is (symmetric) block-AIJ; it controls the block-compressed IJ access below */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* "current" and "square" partitioners are trivial; only invoke the adjacency machinery for a real partitioner */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        /* iia/jja are handed off to the MPIAdj matrix below, which takes ownership and frees them */
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        /* isnumb maps each node to its position in the partition-contiguous numbering; used to build the index sets below */
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs; /* number of (block) rows owned by this rank */
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      /* distribute mbs block rows over n subdomains, the first mbs%n subdomains getting one extra block */
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb, &numbering));
    /* after the sort, indices[] holds the original node ids grouped by partition */
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs constituent point indices */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1501 
1502 /*@C
1503   PCASMDestroySubdomains - Destroys the index sets created with
1504   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1505 
1506   Collective
1507 
1508   Input Parameters:
1509 + n        - the number of index sets
1510 . is       - the array of index sets
1511 - is_local - the array of local index sets, can be `NULL`
1512 
1513   Level: advanced
1514 
1515   Developer Note:
1516   The `IS` arguments should be a *[]
1517 
1518 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1519 @*/
1520 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS *is[], IS *is_local[])
1521 {
1522   PetscInt i;
1523 
1524   PetscFunctionBegin;
1525   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1526   if (*is) {
1527     PetscAssertPointer(*is, 2);
1528     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is)[i]));
1529     PetscCall(PetscFree(*is));
1530   }
1531   if (is_local && *is_local) {
1532     PetscAssertPointer(*is_local, 3);
1533     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is_local)[i]));
1534     PetscCall(PetscFree(*is_local));
1535   }
1536   PetscFunctionReturn(PETSC_SUCCESS);
1537 }
1538 
/*@C
  PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
  preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

  Not Collective

  Input Parameters:
+ m       - the number of mesh points in the x direction
. n       - the number of mesh points in the y direction
. M       - the number of subdomains in the x direction
. N       - the number of subdomains in the y direction
. dof     - degrees of freedom per node
- overlap - overlap in mesh lines

  Output Parameters:
+ Nsub     - the number of subdomains created
. is       - array of index sets defining overlapping (if overlap > 0) subdomains
- is_local - array of index sets defining non-overlapping subdomains

  Level: advanced

  Note:
  Presently `PCASMCreateSubdomains2D()` is valid only for sequential
  preconditioners.  More general related routines are
  `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMSetOverlap()`
@*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS *is[], IS *is_local[])
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0; /* index of the current subdomain in row-major (y outer, x inner) order */
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    /* extend the subdomain by `overlap` mesh lines in each direction, clipped to the grid */
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      /* enumerate the (overlapping) rectangle in natural grid ordering: node (ii,jj) -> m*ii + jj */
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        /* no overlap: the local subdomain equals the overlapping one, so share the IS via a reference */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* rebuild idx (reusing the buffer; loc <= nidx here) with only the non-overlapping interior */
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1623 
1624 /*@C
1625   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1626   only) for the additive Schwarz preconditioner, `PCASM`.
1627 
1628   Not Collective
1629 
1630   Input Parameter:
1631 . pc - the preconditioner context
1632 
1633   Output Parameters:
1634 + n        - if requested, the number of subdomains for this processor (default value = 1)
1635 . is       - if requested, the index sets that define the subdomains for this processor
1636 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1637 
1638   Level: advanced
1639 
1640   Note:
1641   The `IS` numbering is in the parallel, global numbering of the vector.
1642 
1643 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1644           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1645 @*/
1646 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1647 {
1648   PC_ASM   *osm = (PC_ASM *)pc->data;
1649   PetscBool match;
1650 
1651   PetscFunctionBegin;
1652   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1653   if (n) PetscAssertPointer(n, 2);
1654   if (is) PetscAssertPointer(is, 3);
1655   if (is_local) PetscAssertPointer(is_local, 4);
1656   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1657   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1658   if (n) *n = osm->n_local_true;
1659   if (is) *is = osm->is;
1660   if (is_local) *is_local = osm->is_local;
1661   PetscFunctionReturn(PETSC_SUCCESS);
1662 }
1663 
1664 /*@C
1665   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1666   only) for the additive Schwarz preconditioner, `PCASM`.
1667 
1668   Not Collective
1669 
1670   Input Parameter:
1671 . pc - the preconditioner context
1672 
1673   Output Parameters:
1674 + n   - if requested, the number of matrices for this processor (default value = 1)
1675 - mat - if requested, the matrices
1676 
1677   Level: advanced
1678 
1679   Notes:
1680   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`)
1681 
1682   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1683 
1684 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1685           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1686 @*/
1687 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1688 {
1689   PC_ASM   *osm;
1690   PetscBool match;
1691 
1692   PetscFunctionBegin;
1693   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1694   if (n) PetscAssertPointer(n, 2);
1695   if (mat) PetscAssertPointer(mat, 3);
1696   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1697   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1698   if (!match) {
1699     if (n) *n = 0;
1700     if (mat) *mat = NULL;
1701   } else {
1702     osm = (PC_ASM *)pc->data;
1703     if (n) *n = osm->n_local_true;
1704     if (mat) *mat = osm->pmat;
1705   }
1706   PetscFunctionReturn(PETSC_SUCCESS);
1707 }
1708 
1709 /*@
1710   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1711 
1712   Logically Collective
1713 
1714   Input Parameters:
1715 + pc  - the preconditioner
1716 - flg - boolean indicating whether to use subdomains defined by the `DM`
1717 
1718   Options Database Key:
1719 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1720 
1721   Level: intermediate
1722 
1723   Note:
1724   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1725   so setting either of the first two effectively turns the latter off.
1726 
1727   Developer Note:
1728   This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key
1729 
1730 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1731           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1732 @*/
1733 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1734 {
1735   PC_ASM   *osm = (PC_ASM *)pc->data;
1736   PetscBool match;
1737 
1738   PetscFunctionBegin;
1739   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1740   PetscValidLogicalCollectiveBool(pc, flg, 2);
1741   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1742   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1743   if (match) osm->dm_subdomains = flg;
1744   PetscFunctionReturn(PETSC_SUCCESS);
1745 }
1746 
1747 /*@
1748   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1749 
1750   Not Collective
1751 
1752   Input Parameter:
1753 . pc - the preconditioner
1754 
1755   Output Parameter:
1756 . flg - boolean indicating whether to use subdomains defined by the `DM`
1757 
1758   Level: intermediate
1759 
1760   Developer Note:
1761   This should be `PCASMSetUseDMSubdomains()`
1762 
1763 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1764           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1765 @*/
1766 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1767 {
1768   PC_ASM   *osm = (PC_ASM *)pc->data;
1769   PetscBool match;
1770 
1771   PetscFunctionBegin;
1772   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1773   PetscAssertPointer(flg, 2);
1774   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1775   if (match) *flg = osm->dm_subdomains;
1776   else *flg = PETSC_FALSE;
1777   PetscFunctionReturn(PETSC_SUCCESS);
1778 }
1779 
1780 /*@
1781   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1782 
1783   Not Collective
1784 
1785   Input Parameter:
1786 . pc - the `PC`
1787 
1788   Output Parameter:
1789 . sub_mat_type - name of matrix type
1790 
1791   Level: advanced
1792 
1793 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1794 @*/
1795 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1796 {
1797   PetscFunctionBegin;
1798   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1799   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1800   PetscFunctionReturn(PETSC_SUCCESS);
1801 }
1802 
/*@
  PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves

  Collective

  Input Parameters:
+ pc           - the `PC` object
- sub_mat_type - the `MatType`

  Options Database Key:
. -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
   If you specify a base name like aijviennacl, the corresponding sequential type is assumed.

  Note:
  See `MatType` for available types

  Level: advanced

.seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
@*/
PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no-op when the PC is not of type PCASM (PetscTryMethod only calls the method if composed) */
  PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1830