xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision f8b9f887efa5534f423807e37a1fe2e87d779bac)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. However, to keep
5   the VecScatter() handling simple, we treat each processor as if it had the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
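
       For example, if 3 processes own 2, 1, and 3 subdomains respectively, then
       n = 6 and n_local = 3; processes with fewer than n_local true subdomains
       simply participate with empty work in the collective operations.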
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include <petsc/private/matimpl.h>
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         iascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (iascii) {
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for the first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(sviewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(sviewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
69     }
70   } else if (isstring) {
71     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
72     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
73     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
74     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
75   }
76   PetscFunctionReturn(PETSC_SUCCESS);
77 }
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; allow up to 16 characters per index (15 digits plus a space) plus 512 chars for the header. */
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; allow up to 16 characters per index (15 digits plus a space) plus 512 chars for the header. */
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 static PetscErrorCode PCSetUp_ASM(PC pc)
154 {
155   PC_ASM       *osm = (PC_ASM *)pc->data;
156   PetscBool     flg;
157   PetscInt      i, m, m_local;
158   MatReuse      scall = MAT_REUSE_MATRIX;
159   IS            isl;
160   KSP           ksp;
161   PC            subpc;
162   const char   *prefix, *pprefix;
163   Vec           vec;
164   DM           *domain_dm = NULL;
165   MatNullSpace *nullsp    = NULL;
166 
167   PetscFunctionBegin;
168   if (!pc->setupcalled) {
169     PetscInt m;
170 
171     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172     if (osm->n_local_true == PETSC_DECIDE) {
173       /* no subdomains given */
174       /* try pc->dm first, if allowed */
175       if (osm->dm_subdomains && pc->dm) {
176         PetscInt num_domains, d;
177         char   **domain_names;
178         IS      *inner_domain_is, *outer_domain_is;
179         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181                               A future improvement of this code might allow one to use
182                               DM-defined subdomains and also increase the overlap,
183                               but that is not currently supported */
184         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185         for (d = 0; d < num_domains; ++d) {
186           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189         }
190         PetscCall(PetscFree(domain_names));
191         PetscCall(PetscFree(inner_domain_is));
192         PetscCall(PetscFree(outer_domain_is));
193       }
194       if (osm->n_local_true == PETSC_DECIDE) {
195         /* still no subdomains; use one subdomain per processor */
196         osm->n_local_true = 1;
197       }
198     }
199     { /* determine the global and max number of subdomains */
200       struct {
201         PetscInt max, sum;
202       } inwork, outwork;
203       PetscMPIInt size;
204 
205       inwork.max = osm->n_local_true;
206       inwork.sum = osm->n_local_true;
207       PetscCallMPI(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208       osm->n_local = outwork.max;
209       osm->n       = outwork.sum;
210 
211       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212       if (outwork.max == 1 && outwork.sum == size) {
213         /* osm->n_local_true = 1 on all processes; setting this option may enable an optimized MatCreateSubMatrices() implementation */
214         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215       }
216     }
217     if (!osm->is) { /* create the index sets */
218       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219     }
220     if (osm->n_local_true > 1 && !osm->is_local) {
221       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222       for (i = 0; i < osm->n_local_true; i++) {
223         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226         } else {
227           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228           osm->is_local[i] = osm->is[i];
229         }
230       }
231     }
232     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233     if (osm->overlap > 0) {
234       /* Extend the "overlapping" regions by a number of steps */
235       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236     }
237     if (osm->sort_indices) {
238       for (i = 0; i < osm->n_local_true; i++) {
239         PetscCall(ISSort(osm->is[i]));
240         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241       }
242     }
243     flg = PETSC_FALSE;
244     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246     if (!osm->ksp) {
247       /* Create the local solvers */
248       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250       for (i = 0; i < osm->n_local_true; i++) {
251         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
253         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
254         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
255         PetscCall(KSPSetType(ksp, KSPPREONLY));
256         PetscCall(KSPGetPC(ksp, &subpc));
257         PetscCall(PCGetOptionsPrefix(pc, &prefix));
258         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
259         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
260         if (domain_dm) {
261           PetscCall(KSPSetDM(ksp, domain_dm[i]));
262           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
263           PetscCall(DMDestroy(&domain_dm[i]));
264         }
265         osm->ksp[i] = ksp;
266       }
267       if (domain_dm) PetscCall(PetscFree(domain_dm));
268     }
269 
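    /* Build lis, the sorted union of all local subdomains; its size defines the local work vectors lx and ly created below */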
270     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
271     PetscCall(ISSortRemoveDups(osm->lis));
272     PetscCall(ISGetLocalSize(osm->lis, &m));
273 
274     scall = MAT_INITIAL_MATRIX;
275   } else {
276     /*
277        Destroy the blocks from the previous iteration
278     */
279     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
280       PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
281       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
282       scall = MAT_INITIAL_MATRIX;
283     }
284   }
285 
286   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
287   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
288     PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
289     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
290     scall = MAT_INITIAL_MATRIX;
291   }
292 
293   /*
294      Extract out the submatrices
295   */
296   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
297   if (scall == MAT_INITIAL_MATRIX) {
298     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
299     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
300     if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
301   }
302 
303   /* Convert the types of the submatrices (if need be) */
304   if (osm->sub_mat_type) {
305     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
306   }
307 
308   if (!pc->setupcalled) {
309     VecType vtype;
310 
311     /* Create the local work vectors (from the local matrices) and scatter contexts */
312     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
313 
314     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
315     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
316     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
317     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
318     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
319 
320     PetscCall(ISGetLocalSize(osm->lis, &m));
321     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
322     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
323     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
324     PetscCall(VecSetSizes(osm->lx, m, m));
325     PetscCall(VecSetType(osm->lx, vtype));
326     PetscCall(VecDuplicate(osm->lx, &osm->ly));
327     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
328     PetscCall(ISDestroy(&isl));
329 
330     for (i = 0; i < osm->n_local_true; ++i) {
331       ISLocalToGlobalMapping ltog;
332       IS                     isll;
333       const PetscInt        *idx_is;
334       PetscInt              *idx_lis, nout;
335 
336       PetscCall(ISGetLocalSize(osm->is[i], &m));
337       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
338       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
339 
340       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
341       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
342       PetscCall(ISGetLocalSize(osm->is[i], &m));
343       PetscCall(ISGetIndices(osm->is[i], &idx_is));
344       PetscCall(PetscMalloc1(m, &idx_lis));
345       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
346       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
347       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
348       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
349       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
350       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
351       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
352       PetscCall(ISDestroy(&isll));
353       PetscCall(ISDestroy(&isl));
354       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
355         ISLocalToGlobalMapping ltog;
356         IS                     isll, isll_local;
357         const PetscInt        *idx_local;
358         PetscInt              *idx1, *idx2, nout;
359 
360         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
361         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
362 
363         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
364         PetscCall(PetscMalloc1(m_local, &idx1));
365         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
366         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
367         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
368         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
369 
370         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
371         PetscCall(PetscMalloc1(m_local, &idx2));
372         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
373         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
374         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
375         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
376 
377         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
378         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
379 
380         PetscCall(ISDestroy(&isll));
381         PetscCall(ISDestroy(&isll_local));
382       }
383     }
384     PetscCall(VecDestroy(&vec));
385   }
386 
387   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
388     IS      *cis;
389     PetscInt c;
390 
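    /* For local multiplicative composition, also extract the rectangular blocks A(is[i], lis); they are used in PCApply_ASM() to update later block RHSs with earlier block solutions */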
391     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
392     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
393     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
394     PetscCall(PetscFree(cis));
395   }
396 
397   /* Return control to the user so that the submatrices can be modified (e.g., to apply
398      different boundary conditions for the submatrices than for the global problem) */
399   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
400 
401   /*
402      Loop over the subdomains, setting the operators on each local KSP
403   */
404   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
405   for (i = 0; i < osm->n_local_true; i++) {
406     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
407     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
408     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
409   }
410   PetscFunctionReturn(PETSC_SUCCESS);
411 }
412 
413 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
414 {
415   PC_ASM            *osm = (PC_ASM *)pc->data;
416   PetscInt           i;
417   KSPConvergedReason reason;
418 
419   PetscFunctionBegin;
420   for (i = 0; i < osm->n_local_true; i++) {
421     PetscCall(KSPSetUp(osm->ksp[i]));
422     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
423     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
424   }
425   PetscFunctionReturn(PETSC_SUCCESS);
426 }
427 
428 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
429 {
430   PC_ASM     *osm = (PC_ASM *)pc->data;
431   PetscInt    i, n_local_true = osm->n_local_true;
432   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
433 
434   PetscFunctionBegin;
435   /*
436      Support for limiting the restriction or interpolation to only local
437      subdomain values (leaving the other values 0).
438   */
439   if (!(osm->type & PC_ASM_RESTRICT)) {
440     forward = SCATTER_FORWARD_LOCAL;
441     /* have to zero the work RHS since scatter may leave some slots empty */
442     PetscCall(VecSet(osm->lx, 0.0));
443   }
444   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
445 
446   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
447   /* zero the global and the local solutions */
448   PetscCall(VecSet(y, 0.0));
449   PetscCall(VecSet(osm->ly, 0.0));
450 
451   /* copy the global RHS to local RHS including the ghost nodes */
452   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
453   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
454 
455   /* restrict local RHS to the overlapping 0-block RHS */
456   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
457   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
458 
459   /* do the local solves */
460   for (i = 0; i < n_local_true; ++i) {
461     /* solve the overlapping i-block */
462     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
463     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
464     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
465     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
466 
467     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
468       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
469       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
470     } else { /* interpolate the overlapping i-block solution to the local solution */
471       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
472       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
473     }
474 
475     if (i < n_local_true - 1) {
476       /* restrict local RHS to the overlapping (i+1)-block RHS */
477       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
478       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
479 
480       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
481         /* update the overlapping (i+1)-block RHS using the current local solution */
482         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
483         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
484       }
485     }
486   }
487   /* add the local solution to the global solution including the ghost nodes */
488   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
489   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
490   PetscFunctionReturn(PETSC_SUCCESS);
491 }
492 
493 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
494 {
495   PC_ASM     *osm = (PC_ASM *)pc->data;
496   Mat         Z, W;
497   Vec         x;
498   PetscInt    i, m, N;
499   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
500 
501   PetscFunctionBegin;
502   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
503   /*
504      Support for limiting the restriction or interpolation to only local
505      subdomain values (leaving the other values 0).
506   */
507   if (!(osm->type & PC_ASM_RESTRICT)) {
508     forward = SCATTER_FORWARD_LOCAL;
509     /* have to zero the work RHS since scatter may leave some slots empty */
510     PetscCall(VecSet(osm->lx, 0.0));
511   }
512   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
513   PetscCall(VecGetLocalSize(osm->x[0], &m));
514   PetscCall(MatGetSize(X, NULL, &N));
515   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
516 
517   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
518   /* zero the global and the local solutions */
519   PetscCall(MatZeroEntries(Y));
520   PetscCall(VecSet(osm->ly, 0.0));
521 
522   for (i = 0; i < N; ++i) {
523     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
524     /* copy the global RHS to local RHS including the ghost nodes */
525     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
526     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
527     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
528 
529     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
530     /* restrict local RHS to the overlapping 0-block RHS */
531     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
532     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
533     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
534   }
535   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
536   /* solve the overlapping 0-block */
537   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
538   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
539   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
540   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
541   PetscCall(MatDestroy(&Z));
542 
543   for (i = 0; i < N; ++i) {
544     PetscCall(VecSet(osm->ly, 0.0));
545     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
546     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
547       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
548       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
549     } else { /* interpolate the overlapping 0-block solution to the local solution */
550       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
551       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
552     }
553     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
554 
555     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
556     /* add the local solution to the global solution including the ghost nodes */
557     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
558     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
559     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
560   }
561   PetscCall(MatDestroy(&W));
562   PetscFunctionReturn(PETSC_SUCCESS);
563 }
564 
565 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
566 {
567   PC_ASM     *osm = (PC_ASM *)pc->data;
568   PetscInt    i, n_local_true = osm->n_local_true;
569   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
570 
571   PetscFunctionBegin;
572   PetscCheck(osm->n_local_true <= 1 || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
573   /*
574      Support for limiting the restriction or interpolation to only local
575      subdomain values (leaving the other values 0).
576 
577      Note: these are reversed from the PCApply_ASM() because we are applying the
578      transpose of the three terms
579   */
580 
581   if (!(osm->type & PC_ASM_INTERPOLATE)) {
582     forward = SCATTER_FORWARD_LOCAL;
583     /* have to zero the work RHS since scatter may leave some slots empty */
584     PetscCall(VecSet(osm->lx, 0.0));
585   }
586   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
587 
588   /* zero the global and the local solutions */
589   PetscCall(VecSet(y, 0.0));
590   PetscCall(VecSet(osm->ly, 0.0));
591 
592   /* Copy the global RHS to local RHS including the ghost nodes */
593   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
594   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
595 
596   /* Restrict local RHS to the overlapping 0-block RHS */
597   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
598   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
599 
600   /* do the local solves */
601   for (i = 0; i < n_local_true; ++i) {
602     /* solve the overlapping i-block */
603     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
604     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
605     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
606     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
607 
608     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
609       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
610       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
611     } else { /* interpolate the overlapping i-block solution to the local solution */
612       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
613       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
614     }
615 
616     if (i < n_local_true - 1) {
617       /* Restrict local RHS to the overlapping (i+1)-block RHS */
618       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
619       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
620     }
621   }
622   /* Add the local solution to the global solution including the ghost nodes */
623   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
624   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
625   PetscFunctionReturn(PETSC_SUCCESS);
626 }
627 
628 static PetscErrorCode PCReset_ASM(PC pc)
629 {
630   PC_ASM  *osm = (PC_ASM *)pc->data;
631   PetscInt i;
632 
633   PetscFunctionBegin;
634   if (osm->ksp) {
635     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
636   }
637   if (osm->pmat) {
638     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
639   }
640   if (osm->lrestriction) {
641     PetscCall(VecScatterDestroy(&osm->restriction));
642     for (i = 0; i < osm->n_local_true; i++) {
643       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
644       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
645       PetscCall(VecDestroy(&osm->x[i]));
646       PetscCall(VecDestroy(&osm->y[i]));
647     }
648     PetscCall(PetscFree(osm->lrestriction));
649     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
650     PetscCall(PetscFree(osm->x));
651     PetscCall(PetscFree(osm->y));
652   }
653   PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
654   PetscCall(ISDestroy(&osm->lis));
655   PetscCall(VecDestroy(&osm->lx));
656   PetscCall(VecDestroy(&osm->ly));
657   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
658 
659   PetscCall(PetscFree(osm->sub_mat_type));
660 
661   osm->is       = NULL;
662   osm->is_local = NULL;
663   PetscFunctionReturn(PETSC_SUCCESS);
664 }
665 
666 static PetscErrorCode PCDestroy_ASM(PC pc)
667 {
668   PC_ASM  *osm = (PC_ASM *)pc->data;
669   PetscInt i;
670 
671   PetscFunctionBegin;
672   PetscCall(PCReset_ASM(pc));
673   if (osm->ksp) {
674     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
675     PetscCall(PetscFree(osm->ksp));
676   }
677   PetscCall(PetscFree(pc->data));
678 
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
686   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
687   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
688   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
689   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
690   PetscFunctionReturn(PETSC_SUCCESS);
691 }
692 
693 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems PetscOptionsObject)
694 {
695   PC_ASM         *osm = (PC_ASM *)pc->data;
696   PetscInt        blocks, ovl;
697   PetscBool       flg;
698   PCASMType       asmtype;
699   PCCompositeType loctype;
700   char            sub_mat_type[256];
701 
702   PetscFunctionBegin;
703   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
704   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
705   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
706   if (flg) {
707     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
708     osm->dm_subdomains = PETSC_FALSE;
709   }
710   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
711   if (flg) {
712     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
713     osm->dm_subdomains = PETSC_FALSE;
714   }
715   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
716   if (flg) {
717     PetscCall(PCASMSetOverlap(pc, ovl));
718     osm->dm_subdomains = PETSC_FALSE;
719   }
720   flg = PETSC_FALSE;
721   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
722   if (flg) PetscCall(PCASMSetType(pc, asmtype));
723   flg = PETSC_FALSE;
724   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
725   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
726   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
727   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
728   PetscOptionsHeadEnd();
729   PetscFunctionReturn(PETSC_SUCCESS);
730 }
731 
732 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
733 {
734   PC_ASM  *osm = (PC_ASM *)pc->data;
735   PetscInt i;
736 
737   PetscFunctionBegin;
738   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
739   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
740 
741   if (!pc->setupcalled) {
742     if (is) {
743       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
744     }
745     if (is_local) {
746       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
747     }
748     PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
749 
750     if (osm->ksp && osm->n_local_true != n) {
751       for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
752       PetscCall(PetscFree(osm->ksp));
753     }
754 
755     osm->n_local_true = n;
756     osm->is           = NULL;
757     osm->is_local     = NULL;
758     if (is) {
759       PetscCall(PetscMalloc1(n, &osm->is));
760       for (i = 0; i < n; i++) osm->is[i] = is[i];
761       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
762       osm->overlap = -1;
763     }
764     if (is_local) {
765       PetscCall(PetscMalloc1(n, &osm->is_local));
766       for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
767       if (!is) {
768         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
769         for (i = 0; i < osm->n_local_true; i++) {
770           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
771             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
772             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
773           } else {
774             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
775             osm->is[i] = osm->is_local[i];
776           }
777         }
778       }
779     }
780   }
781   PetscFunctionReturn(PETSC_SUCCESS);
782 }
783 
784 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
785 {
786   PC_ASM     *osm = (PC_ASM *)pc->data;
787   PetscMPIInt rank, size;
788   PetscInt    n;
789 
790   PetscFunctionBegin;
791   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
792   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets; they cannot be set globally yet.");
793 
794   /*
795      Split the subdomains equally among all processors
796   */
797   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
798   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
799   n = N / size + ((N % size) > rank);
800   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, rank, size, N);
801   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
802   if (!pc->setupcalled) {
803     PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
804 
805     osm->n_local_true = n;
806     osm->is           = NULL;
807     osm->is_local     = NULL;
808   }
809   PetscFunctionReturn(PETSC_SUCCESS);
810 }
811 
812 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
813 {
814   PC_ASM *osm = (PC_ASM *)pc->data;
815 
816   PetscFunctionBegin;
817   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
818   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
819   if (!pc->setupcalled) osm->overlap = ovl;
820   PetscFunctionReturn(PETSC_SUCCESS);
821 }
822 
823 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
824 {
825   PC_ASM *osm = (PC_ASM *)pc->data;
826 
827   PetscFunctionBegin;
828   osm->type     = type;
829   osm->type_set = PETSC_TRUE;
830   PetscFunctionReturn(PETSC_SUCCESS);
831 }
832 
833 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
834 {
835   PC_ASM *osm = (PC_ASM *)pc->data;
836 
837   PetscFunctionBegin;
838   *type = osm->type;
839   PetscFunctionReturn(PETSC_SUCCESS);
840 }
841 
842 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
843 {
844   PC_ASM *osm = (PC_ASM *)pc->data;
845 
846   PetscFunctionBegin;
847   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
848   osm->loctype = type;
849   PetscFunctionReturn(PETSC_SUCCESS);
850 }
851 
852 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
853 {
854   PC_ASM *osm = (PC_ASM *)pc->data;
855 
856   PetscFunctionBegin;
857   *type = osm->loctype;
858   PetscFunctionReturn(PETSC_SUCCESS);
859 }
860 
861 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
862 {
863   PC_ASM *osm = (PC_ASM *)pc->data;
864 
865   PetscFunctionBegin;
866   osm->sort_indices = doSort;
867   PetscFunctionReturn(PETSC_SUCCESS);
868 }
869 
870 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
871 {
872   PC_ASM *osm = (PC_ASM *)pc->data;
873 
874   PetscFunctionBegin;
875   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
876 
877   if (n_local) *n_local = osm->n_local_true;
878   if (first_local) {
879     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
880     *first_local -= osm->n_local_true;
881   }
882   if (ksp) *ksp = osm->ksp;
883   PetscFunctionReturn(PETSC_SUCCESS);
884 }
885 
886 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
887 {
888   PC_ASM *osm = (PC_ASM *)pc->data;
889 
890   PetscFunctionBegin;
891   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
892   PetscAssertPointer(sub_mat_type, 2);
893   *sub_mat_type = osm->sub_mat_type;
894   PetscFunctionReturn(PETSC_SUCCESS);
895 }
896 
897 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
898 {
899   PC_ASM *osm = (PC_ASM *)pc->data;
900 
901   PetscFunctionBegin;
902   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
903   PetscCall(PetscFree(osm->sub_mat_type));
904   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
905   PetscFunctionReturn(PETSC_SUCCESS);
906 }
907 
908 /*@
909   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
910 
911   Collective
912 
913   Input Parameters:
914 + pc       - the preconditioner context
915 . n        - the number of subdomains for this processor (default value = 1)
916 . is       - the index sets that define the subdomains for this processor (or `NULL` for PETSc to determine the subdomains)
917              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
918 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
919              (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array
920              (not the `IS` in the array) after this call
921 
922   Options Database Key:
923 . -pc_asm_local_blocks <blks> - Sets number of local blocks
924 
925   Level: advanced
926 
927   Notes:
928   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`
929 
930   By default the `PCASM` preconditioner uses 1 block per processor.
931 
932   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
933 
934   If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
935   back to form the global solution (this is the standard restricted additive Schwarz method, RASM)
936 
937   If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
938   no code to handle that case.
939 
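  Example Usage:
  A minimal sketch, assuming roughly 100 matrix rows owned per process; `pc`, `rstart`, and `rend`
  (as returned by `MatGetOwnershipRange()`) are assumed to exist, and the particular split into two
  overlapping subdomains is purely illustrative.
.vb
  IS is[2];
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart - 40, rstart, 1, &is[0])); // rows [rstart, rend-40)
  PetscCall(ISCreateStride(PETSC_COMM_SELF, 60, rend - 60, 1, &is[1]));              // rows [rend-60, rend), overlapping is[0]
  PetscCall(PCASMSetLocalSubdomains(pc, 2, is, NULL)); // the IS objects must remain valid, see above
.ve
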
940 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
941           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
942 @*/
943 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
944 {
945   PetscFunctionBegin;
946   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
947   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
948   PetscFunctionReturn(PETSC_SUCCESS);
949 }
950 
951 /*@
952   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
953   additive Schwarz preconditioner, `PCASM`.
954 
955   Collective, all MPI ranks must pass in the same array of `IS`
956 
957   Input Parameters:
958 + pc       - the preconditioner context
959 . N        - the number of subdomains for all processors
960 . is       - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains)
961              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
962 - is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information)
963              The values of the `is_local` array are copied so you can free the array (not the `IS` in the array) after this call
964 
965   Options Database Key:
966 . -pc_asm_blocks <blks> - Sets total blocks
967 
968   Level: advanced
969 
970   Notes:
971   Currently you cannot use this to set the actual subdomains with the arguments `is` or `is_local`.
972 
973   By default the `PCASM` preconditioner uses 1 block per processor.
974 
975   These index sets cannot be destroyed until after completion of the
976   linear solves for which the `PCASM` preconditioner is being used.
977 
978   Use `PCASMSetLocalSubdomains()` to set local subdomains.
979 
980   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`.
981 
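  Example Usage:
  A minimal sketch; the block count of 16 is arbitrary, and `is`/`is_local` must currently be `NULL`.
.vb
  PetscCall(PCASMSetTotalSubdomains(pc, 16, NULL, NULL)); // PETSc splits the 16 blocks (nearly) evenly across the processes
.ve
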
982 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
983           `PCASMCreateSubdomains2D()`, `PCGASM`
984 @*/
985 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
986 {
987   PetscFunctionBegin;
988   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
989   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
990   PetscFunctionReturn(PETSC_SUCCESS);
991 }
992 
993 /*@
994   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
995   additive Schwarz preconditioner, `PCASM`.
996 
997   Logically Collective
998 
999   Input Parameters:
1000 + pc  - the preconditioner context
1001 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1002 
1003   Options Database Key:
1004 . -pc_asm_overlap <ovl> - Sets overlap
1005 
1006   Level: intermediate
1007 
1008   Notes:
1009   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1010   multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1011   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1012 
1013   The overlap defaults to 1, so if one desires that no additional
1014   overlap be computed beyond what may have been set with a call to
1015   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1016   must be set to 0.  In particular, if one does not explicitly set
1017   the subdomains in an application code, then all overlap would be computed
1018   internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1019   variant that is equivalent to the block Jacobi preconditioner.
1020 
1021   The default algorithm used by PETSc to increase overlap is fast but not scalable;
1022   use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1023 
1024   One can define initial index sets with any overlap via
1025   `PCASMSetLocalSubdomains()`; the routine
1026   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1027   if desired.
1028 
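  Example Usage:
  A minimal sketch; equivalent to the command-line option -pc_asm_overlap 2.
.vb
  PetscCall(PCASMSetOverlap(pc, 2)); // extend each subdomain by two levels using MatIncreaseOverlap()
.ve
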
1029 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1030           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1031 @*/
1032 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1033 {
1034   PetscFunctionBegin;
1035   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1036   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1037   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1038   PetscFunctionReturn(PETSC_SUCCESS);
1039 }
1040 
1041 /*@
1042   PCASMSetType - Sets the type of restriction and interpolation used
1043   for local problems in the additive Schwarz method, `PCASM`.
1044 
1045   Logically Collective
1046 
1047   Input Parameters:
1048 + pc   - the preconditioner context
1049 - type - variant of `PCASM`, one of
1050 .vb
1051       PC_ASM_BASIC       - full interpolation and restriction
1052       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1053       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1054       PC_ASM_NONE        - local processor restriction and interpolation
1055 .ve
1056 
1057   Options Database Key:
1058 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1059 
1060   Level: intermediate
1061 
1062   Note:
1063   If the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1064   to limit the local processor interpolation.
1065 
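  Example Usage:
  A minimal sketch selecting the non-restricted variant; equivalent to -pc_asm_type basic.
.vb
  PetscCall(PCASMSetType(pc, PC_ASM_BASIC));
.ve
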
1066 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1067           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1068 @*/
1069 PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1070 {
1071   PetscFunctionBegin;
1072   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1073   PetscValidLogicalCollectiveEnum(pc, type, 2);
1074   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1075   PetscFunctionReturn(PETSC_SUCCESS);
1076 }
1077 
1078 /*@
1079   PCASMGetType - Gets the type of restriction and interpolation used
1080   for local problems in the additive Schwarz method, `PCASM`.
1081 
1082   Logically Collective
1083 
1084   Input Parameter:
1085 . pc - the preconditioner context
1086 
1087   Output Parameter:
1088 . type - variant of `PCASM`, one of
1089 .vb
1090       PC_ASM_BASIC       - full interpolation and restriction
1091       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1092       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1093       PC_ASM_NONE        - local processor restriction and interpolation
1094 .ve
1095 
1096   Options Database Key:
1097 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1098 
1099   Level: intermediate
1100 
1101 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1102           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1103 @*/
1104 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1105 {
1106   PetscFunctionBegin;
1107   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1108   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1109   PetscFunctionReturn(PETSC_SUCCESS);
1110 }
1111 
1112 /*@
1113   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1114 
1115   Logically Collective
1116 
1117   Input Parameters:
1118 + pc   - the preconditioner context
1119 - type - type of composition, one of
1120 .vb
1121   PC_COMPOSITE_ADDITIVE       - local additive combination
1122   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1123 .ve
1124 
1125   Options Database Key:
1126 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1127 
1128   Level: intermediate
1129 
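  Example Usage:
  A minimal sketch; composes the local block solves multiplicatively, as with -pc_asm_local_type multiplicative.
.vb
  PetscCall(PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE));
.ve
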
1130 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1131 @*/
1132 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1133 {
1134   PetscFunctionBegin;
1135   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1136   PetscValidLogicalCollectiveEnum(pc, type, 2);
1137   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1138   PetscFunctionReturn(PETSC_SUCCESS);
1139 }
1140 
1141 /*@
1142   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1143 
1144   Logically Collective
1145 
1146   Input Parameter:
1147 . pc - the preconditioner context
1148 
1149   Output Parameter:
1150 . type - type of composition, one of
1151 .vb
1152   PC_COMPOSITE_ADDITIVE       - local additive combination
1153   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1154 .ve
1155 
1156   Options Database Key:
1157 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1158 
1159   Level: intermediate
1160 
1161 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMType`, `PCCompositeType`
1162 @*/
1163 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1164 {
1165   PetscFunctionBegin;
1166   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1167   PetscAssertPointer(type, 2);
1168   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1169   PetscFunctionReturn(PETSC_SUCCESS);
1170 }
1171 
1172 /*@
1173   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1174 
1175   Logically Collective
1176 
1177   Input Parameters:
1178 + pc     - the preconditioner context
1179 - doSort - sort the subdomain indices
1180 
1181   Level: intermediate
1182 
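  Example Usage:
  A minimal sketch disabling the sorting of subdomain indices; assumes `pc` is a `PC` obtained from, for example, `KSPGetPC()`:
.vb
  PetscCall(PCSetType(pc, PCASM));
  PetscCall(PCASMSetSortIndices(pc, PETSC_FALSE));
.ve
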
1183 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1184           `PCASMCreateSubdomains2D()`
1185 @*/
1186 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1187 {
1188   PetscFunctionBegin;
1189   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1190   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1191   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1192   PetscFunctionReturn(PETSC_SUCCESS);
1193 }
1194 
1195 /*@C
1196   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1197   this processor.
1198 
1199   Collective iff first_local is requested
1200 
1201   Input Parameter:
1202 . pc - the preconditioner context
1203 
1204   Output Parameters:
1205 + n_local     - the number of blocks on this processor, or `NULL` if not needed
1206 . first_local - the global number of the first block on this processor, or `NULL` if not needed; all processors must request it or all must pass `NULL`
1207 - ksp         - the array of `KSP` contexts
1208 
1209   Level: advanced
1210 
1211   Notes:
1212   The array of `KSP` contexts returned by `PCASMGetSubKSP()` is owned by the `PC` and must not be freed by the caller.
1213 
1214   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1215 
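  Example Usage:
  A sketch setting a direct solver on every local block; assumes `ksp` is the outer `KSP` whose preconditioner `pc` has type `PCASM` (the loop body is illustrative only):
.vb
  PetscInt i, nlocal;
  KSP     *subksp;
  PC       subpc;

  PetscCall(KSPSetUp(ksp));
  PetscCall(PCASMGetSubKSP(pc, &nlocal, NULL, &subksp));
  for (i = 0; i < nlocal; i++) {
    PetscCall(KSPGetPC(subksp[i], &subpc));
    PetscCall(PCSetType(subpc, PCLU));
  }
.ve
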
1216 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1217           `PCASMCreateSubdomains2D()`
1218 @*/
1219 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1220 {
1221   PetscFunctionBegin;
1222   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1223   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1224   PetscFunctionReturn(PETSC_SUCCESS);
1225 }
1226 
1227 /*MC
1228    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1229            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`
1230 
1231    Options Database Keys:
1232 +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1233 .  -pc_asm_overlap <ovl>                          - Sets overlap
1234 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1235 .  -pc_asm_dm_subdomains <bool>                   - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1236 -  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1237 
1238    Level: beginner
1239 
1240    Notes:
1241    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors, you
1242    will get different convergence rates due to the default option of `-pc_asm_type restrict`. Use
1243    `-pc_asm_type basic` to get the same convergence behavior in both cases.
1244 
1245    Each processor can have one or more blocks, but a block cannot be shared by more
1246    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1247 
1248    To set options on the solvers for each block, prefix all the `KSP` and `PC`
1249    options database keys with `-sub_`. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`.
1250 
1251    To set options on the solvers separately for each block, call `PCASMGetSubKSP()`
1252    and set the options directly on the resulting `KSP` objects (you can access each block's `PC` with `KSPGetPC()`).
1253 
1254    If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains
1255 
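   Example Usage:
   A typical command line combining the keys above (the executable name is illustrative):
.vb
   mpiexec -n 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu
.ve
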
1256 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1257           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1258           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`
1259 M*/
1260 
1261 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1262 {
1263   PC_ASM *osm;
1264 
1265   PetscFunctionBegin;
1266   PetscCall(PetscNew(&osm));
1267 
1268   osm->n             = PETSC_DECIDE;
1269   osm->n_local       = 0;
1270   osm->n_local_true  = PETSC_DECIDE;
1271   osm->overlap       = 1;
1272   osm->ksp           = NULL;
1273   osm->restriction   = NULL;
1274   osm->lprolongation = NULL;
1275   osm->lrestriction  = NULL;
1276   osm->x             = NULL;
1277   osm->y             = NULL;
1278   osm->is            = NULL;
1279   osm->is_local      = NULL;
1280   osm->mat           = NULL;
1281   osm->pmat          = NULL;
1282   osm->type          = PC_ASM_RESTRICT;
1283   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1284   osm->sort_indices  = PETSC_TRUE;
1285   osm->dm_subdomains = PETSC_FALSE;
1286   osm->sub_mat_type  = NULL;
1287 
1288   pc->data                 = (void *)osm;
1289   pc->ops->apply           = PCApply_ASM;
1290   pc->ops->matapply        = PCMatApply_ASM;
1291   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1292   pc->ops->setup           = PCSetUp_ASM;
1293   pc->ops->reset           = PCReset_ASM;
1294   pc->ops->destroy         = PCDestroy_ASM;
1295   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1296   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1297   pc->ops->view            = PCView_ASM;
1298   pc->ops->applyrichardson = NULL;
1299 
1300   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1301   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1302   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1303   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1304   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1305   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1306   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1307   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1310   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1311   PetscFunctionReturn(PETSC_SUCCESS);
1312 }
1313 
1314 /*@C
1315   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1316   preconditioner, `PCASM`, for any problem on a general grid.
1317 
1318   Collective
1319 
1320   Input Parameters:
1321 + A - The global matrix operator
1322 - n - the number of local blocks
1323 
1324   Output Parameter:
1325 . outis - the array of index sets defining the subdomains
1326 
1327   Level: advanced
1328 
1329   Note:
1330   This generates nonoverlapping subdomains; `PCASM` will generate the overlap
1331   from these if you use `PCASMSetLocalSubdomains()`.
1332 
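  Example Usage:
  A minimal sketch pairing this routine with `PCASMSetLocalSubdomains()` and `PCASMDestroySubdomains()`; assumes `A` is the system matrix and `pc` is a `PC` of type `PCASM` (the block count is illustrative):
.vb
  IS      *is;
  PetscInt nblocks = 4;

  PetscCall(PCASMCreateSubdomains(A, nblocks, &is));
  PetscCall(PCASMSetLocalSubdomains(pc, nblocks, is, NULL));
  /* ... set up and solve ... */
  PetscCall(PCASMDestroySubdomains(nblocks, &is, NULL));
.ve
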
1333 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1334 @*/
1335 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1336 {
1337   MatPartitioning mpart;
1338   const char     *prefix;
1339   PetscInt        i, j, rstart, rend, bs;
1340   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1341   Mat             Ad = NULL, adj;
1342   IS              ispart, isnumb, *is;
1343 
1344   PetscFunctionBegin;
1345   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1346   PetscAssertPointer(outis, 3);
1347   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);
1348 
1349   /* Get prefix, row distribution, and block size */
1350   PetscCall(MatGetOptionsPrefix(A, &prefix));
1351   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1352   PetscCall(MatGetBlockSize(A, &bs));
1353   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1354 
1355   /* Get diagonal block from matrix if possible */
1356   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1357   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1358   if (Ad) {
1359     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1360     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1361   }
1362   if (Ad && n > 1) {
1363     PetscBool match, done;
1364     /* Try to setup a good matrix partitioning if available */
1365     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1366     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1367     PetscCall(MatPartitioningSetFromOptions(mpart));
1368     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1369     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1370     if (!match) { /* assume a "good" partitioner is available */
1371       PetscInt        na;
1372       const PetscInt *ia, *ja;
1373       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1374       if (done) {
1375         /* Build adjacency matrix by hand. Unfortunately a call to
1376            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1377            remove the block-aij structure and we cannot expect
1378            MatPartitioning to split vertices as we need */
1379         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1380         const PetscInt *row;
1381         nnz = 0;
1382         for (i = 0; i < na; i++) { /* count number of nonzeros */
1383           len = ia[i + 1] - ia[i];
1384           row = ja + ia[i];
1385           for (j = 0; j < len; j++) {
1386             if (row[j] == i) { /* don't count diagonal */
1387               len--;
1388               break;
1389             }
1390           }
1391           nnz += len;
1392         }
1393         PetscCall(PetscMalloc1(na + 1, &iia));
1394         PetscCall(PetscMalloc1(nnz, &jja));
1395         nnz    = 0;
1396         iia[0] = 0;
1397         for (i = 0; i < na; i++) { /* fill adjacency */
1398           cnt = 0;
1399           len = ia[i + 1] - ia[i];
1400           row = ja + ia[i];
1401           for (j = 0; j < len; j++) {
1402             if (row[j] != i) { /* if not diagonal */
1403               jja[nnz + cnt++] = row[j];
1404             }
1405           }
1406           nnz += cnt;
1407           iia[i + 1] = nnz;
1408         }
1409         /* Partitioning of the adjacency matrix */
1410         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1411         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1412         PetscCall(MatPartitioningSetNParts(mpart, n));
1413         PetscCall(MatPartitioningApply(mpart, &ispart));
1414         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1415         PetscCall(MatDestroy(&adj));
1416         foundpart = PETSC_TRUE;
1417       }
1418       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1419     }
1420     PetscCall(MatPartitioningDestroy(&mpart));
1421   }
1422 
1423   PetscCall(PetscMalloc1(n, &is));
1424   *outis = is;
1425 
1426   if (!foundpart) {
1427     /* Partitioning by contiguous chunks of rows */
1428 
1429     PetscInt mbs   = (rend - rstart) / bs;
1430     PetscInt start = rstart;
1431     for (i = 0; i < n; i++) {
1432       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1433       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1434       start += count;
1435     }
1436 
1437   } else {
1438     /* Partitioning by adjacency of diagonal block  */
1439 
1440     const PetscInt *numbering;
1441     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1442     /* Get node count in each partition */
1443     PetscCall(PetscMalloc1(n, &count));
1444     PetscCall(ISPartitioningCount(ispart, n, count));
1445     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1446       for (i = 0; i < n; i++) count[i] *= bs;
1447     }
1448     /* Build indices from node numbering */
1449     PetscCall(ISGetLocalSize(isnumb, &nidx));
1450     PetscCall(PetscMalloc1(nidx, &indices));
1451     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1452     PetscCall(ISGetIndices(isnumb, &numbering));
1453     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1454     PetscCall(ISRestoreIndices(isnumb, &numbering));
1455     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1456       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1457       for (i = 0; i < nidx; i++) {
1458         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1459       }
1460       PetscCall(PetscFree(indices));
1461       nidx *= bs;
1462       indices = newidx;
1463     }
1464     /* Shift to get global indices */
1465     for (i = 0; i < nidx; i++) indices[i] += rstart;
1466 
1467     /* Build the index sets for each block */
1468     for (i = 0; i < n; i++) {
1469       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1470       PetscCall(ISSort(is[i]));
1471       start += count[i];
1472     }
1473 
1474     PetscCall(PetscFree(count));
1475     PetscCall(PetscFree(indices));
1476     PetscCall(ISDestroy(&isnumb));
1477     PetscCall(ISDestroy(&ispart));
1478   }
1479   PetscFunctionReturn(PETSC_SUCCESS);
1480 }
1481 
1482 /*@C
1483   PCASMDestroySubdomains - Destroys the index sets created with
1484   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1485 
1486   Collective
1487 
1488   Input Parameters:
1489 + n        - the number of index sets
1490 . is       - the array of index sets
1491 - is_local - the array of local index sets, can be `NULL`
1492 
1493   Level: advanced
1494 
1495   Developer Note:
1496   The `IS` arguments should be of type `IS *[]`
1497 
1498 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1499 @*/
1500 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS *is[], IS *is_local[])
1501 {
1502   PetscInt i;
1503 
1504   PetscFunctionBegin;
1505   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1506   if (*is) {
1507     PetscAssertPointer(*is, 2);
1508     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is)[i]));
1509     PetscCall(PetscFree(*is));
1510   }
1511   if (is_local && *is_local) {
1512     PetscAssertPointer(*is_local, 3);
1513     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is_local)[i]));
1514     PetscCall(PetscFree(*is_local));
1515   }
1516   PetscFunctionReturn(PETSC_SUCCESS);
1517 }
1518 
1519 /*@C
1520   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1521   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1522 
1523   Not Collective
1524 
1525   Input Parameters:
1526 + m       - the number of mesh points in the x direction
1527 . n       - the number of mesh points in the y direction
1528 . M       - the number of subdomains in the x direction
1529 . N       - the number of subdomains in the y direction
1530 . dof     - degrees of freedom per node
1531 - overlap - overlap in mesh lines
1532 
1533   Output Parameters:
1534 + Nsub     - the number of subdomains created
1535 . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1536 - is_local - array of index sets defining non-overlapping subdomains
1537 
1538   Level: advanced
1539 
1540   Note:
1541   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1542   preconditioners and requires `dof` equal to 1. More general related routines are
1543   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1544 
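  Example Usage:
  A sketch for a sequential solve on a 64 x 64 grid split into a 4 x 4 array of subdomains with overlap 1 (all sizes are illustrative); assumes `pc` is a `PC` of type `PCASM`:
.vb
  PetscInt Nsub;
  IS      *is, *is_local;

  PetscCall(PCASMCreateSubdomains2D(64, 64, 4, 4, 1, 1, &Nsub, &is, &is_local));
  PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
  PetscCall(PCASMDestroySubdomains(Nsub, &is, &is_local));
.ve
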
1545 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1546           `PCASMSetOverlap()`
1547 @*/
1548 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS *is[], IS *is_local[])
1549 {
1550   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1551   PetscInt nidx, *idx, loc, ii, jj, count;
1552 
1553   PetscFunctionBegin;
1554   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");
1555 
1556   *Nsub = N * M;
1557   PetscCall(PetscMalloc1(*Nsub, is));
1558   PetscCall(PetscMalloc1(*Nsub, is_local));
1559   ystart    = 0;
1560   loc_outer = 0;
1561   for (i = 0; i < N; i++) {
1562     height = n / N + ((n % N) > i); /* height of subdomain */
1563     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1564     yleft = ystart - overlap;
1565     if (yleft < 0) yleft = 0;
1566     yright = ystart + height + overlap;
1567     if (yright > n) yright = n;
1568     xstart = 0;
1569     for (j = 0; j < M; j++) {
1570       width = m / M + ((m % M) > j); /* width of subdomain */
1571       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1572       xleft = xstart - overlap;
1573       if (xleft < 0) xleft = 0;
1574       xright = xstart + width + overlap;
1575       if (xright > m) xright = m;
1576       nidx = (xright - xleft) * (yright - yleft);
1577       PetscCall(PetscMalloc1(nidx, &idx));
1578       loc = 0;
1579       for (ii = yleft; ii < yright; ii++) {
1580         count = m * ii + xleft;
1581         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1582       }
1583       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1584       if (overlap == 0) {
1585         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));
1586 
1587         (*is_local)[loc_outer] = (*is)[loc_outer];
1588       } else {
1589         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1590           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1591         }
1592         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1593       }
1594       PetscCall(PetscFree(idx));
1595       xstart += width;
1596       loc_outer++;
1597     }
1598     ystart += height;
1599   }
1600   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1601   PetscFunctionReturn(PETSC_SUCCESS);
1602 }
1603 
1604 /*@C
1605   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1606   only) for the additive Schwarz preconditioner, `PCASM`.
1607 
1608   Not Collective
1609 
1610   Input Parameter:
1611 . pc - the preconditioner context
1612 
1613   Output Parameters:
1614 + n        - if requested, the number of subdomains for this processor (default value = 1)
1615 . is       - if requested, the index sets that define the subdomains for this processor
1616 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1617 
1618   Level: advanced
1619 
1620   Note:
1621   The `IS` numbering is in the parallel, global numbering of the vector.
1622 
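  Example Usage:
  A minimal sketch; assumes `pc` is a set-up `PC` of type `PCASM`:
.vb
  PetscInt n;
  IS      *is, *is_local;

  PetscCall(PCASMGetLocalSubdomains(pc, &n, &is, &is_local));
.ve
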
1623 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1624           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1625 @*/
1626 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1627 {
1628   PC_ASM   *osm = (PC_ASM *)pc->data;
1629   PetscBool match;
1630 
1631   PetscFunctionBegin;
1632   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1633   if (n) PetscAssertPointer(n, 2);
1634   if (is) PetscAssertPointer(is, 3);
1635   if (is_local) PetscAssertPointer(is_local, 4);
1636   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1637   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1638   if (n) *n = osm->n_local_true;
1639   if (is) *is = osm->is;
1640   if (is_local) *is_local = osm->is_local;
1641   PetscFunctionReturn(PETSC_SUCCESS);
1642 }
1643 
1644 /*@C
1645   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1646   only) for the additive Schwarz preconditioner, `PCASM`.
1647 
1648   Not Collective
1649 
1650   Input Parameter:
1651 . pc - the preconditioner context
1652 
1653   Output Parameters:
1654 + n   - if requested, the number of matrices for this processor (default value = 1)
1655 - mat - if requested, the matrices
1656 
1657   Level: advanced
1658 
1659   Notes:
1660   Call this routine after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`.
1661 
1662   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1663 
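  Example Usage:
  A minimal sketch; assumes `ksp` is the outer `KSP` whose preconditioner `pc` has type `PCASM`:
.vb
  PetscInt nmat;
  Mat     *mats;

  PetscCall(KSPSetUp(ksp));
  PetscCall(PCASMGetLocalSubmatrices(pc, &nmat, &mats));
.ve
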
1664 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1665           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1666 @*/
1667 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1668 {
1669   PC_ASM   *osm;
1670   PetscBool match;
1671 
1672   PetscFunctionBegin;
1673   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1674   if (n) PetscAssertPointer(n, 2);
1675   if (mat) PetscAssertPointer(mat, 3);
1676   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1677   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1678   if (!match) {
1679     if (n) *n = 0;
1680     if (mat) *mat = NULL;
1681   } else {
1682     osm = (PC_ASM *)pc->data;
1683     if (n) *n = osm->n_local_true;
1684     if (mat) *mat = osm->pmat;
1685   }
1686   PetscFunctionReturn(PETSC_SUCCESS);
1687 }
1688 
1689 /*@
1690   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1691 
1692   Logically Collective
1693 
1694   Input Parameters:
1695 + pc  - the preconditioner
1696 - flg - boolean indicating whether to use subdomains defined by the `DM`
1697 
1698   Options Database Key:
1699 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1700 
1701   Level: intermediate
1702 
1703   Note:
1704   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1705   so setting either of the first two effectively turns the latter off.
1706 
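  Example Usage:
  A minimal sketch; assumes the outer `KSP` has a `DM` attached (for example via `KSPSetDM()`) and `pc` is its preconditioner:
.vb
  PetscCall(PCSetType(pc, PCASM));
  PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE));
.ve
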
1707   Developer Note:
1708   This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key
1709 
1710 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1711           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1712 @*/
1713 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1714 {
1715   PC_ASM   *osm = (PC_ASM *)pc->data;
1716   PetscBool match;
1717 
1718   PetscFunctionBegin;
1719   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1720   PetscValidLogicalCollectiveBool(pc, flg, 2);
1721   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1722   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1723   if (match) osm->dm_subdomains = flg;
1724   PetscFunctionReturn(PETSC_SUCCESS);
1725 }
1726 
1727 /*@
1728   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1729 
1730   Not Collective
1731 
1732   Input Parameter:
1733 . pc - the preconditioner
1734 
1735   Output Parameter:
1736 . flg - boolean indicating whether to use subdomains defined by the `DM`
1737 
1738   Level: intermediate
1739 
1740   Developer Note:
1741   This should be `PCASMGetUseDMSubdomains()`
1742 
1743 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1744           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1745 @*/
1746 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1747 {
1748   PC_ASM   *osm = (PC_ASM *)pc->data;
1749   PetscBool match;
1750 
1751   PetscFunctionBegin;
1752   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1753   PetscAssertPointer(flg, 2);
1754   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1755   if (match) *flg = osm->dm_subdomains;
1756   else *flg = PETSC_FALSE;
1757   PetscFunctionReturn(PETSC_SUCCESS);
1758 }
1759 
1760 /*@
1761   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1762 
1763   Not Collective
1764 
1765   Input Parameter:
1766 . pc - the `PC`
1767 
1768   Output Parameter:
1769 . sub_mat_type - name of matrix type
1770 
1771   Level: advanced
1772 
1773 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1774 @*/
1775 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1776 {
1777   PetscFunctionBegin;
1778   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1779   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1780   PetscFunctionReturn(PETSC_SUCCESS);
1781 }
1782 
1783 /*@
1784   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1785 
1786   Collective
1787 
1788   Input Parameters:
1789 + pc           - the `PC` object
1790 - sub_mat_type - the `MatType`
1791 
1792   Options Database Key:
1793 . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1794    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1795 
1796   Level: advanced
1797 
1798   Note:
1799   See `MatType` for available types.
1800 
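  Example Usage:
  A sketch converting the subdomain matrices to sequential dense before the local factorizations; `MATSEQDENSE` is just one possible choice, and `pc` is assumed to be a `PC` of type `PCASM`:
.vb
  PetscCall(PCSetType(pc, PCASM));
  PetscCall(PCASMSetSubMatType(pc, MATSEQDENSE));
.ve
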
1801 .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1802 @*/
1803 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1804 {
1805   PetscFunctionBegin;
1806   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1807   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1808   PetscFunctionReturn(PETSC_SUCCESS);
1809 }
1810