xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 0b4b7b1c20c2ed4ade67e3d50a7710fe0ffbfca5)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include <petsc/private/matimpl.h>
15 
/*
  PCView_ASM - Prints the state of the ASM preconditioner to a viewer.

  Supports ASCII and string viewers. For ASCII viewers the output depends on
  the viewer format:
    - default: a one-line summary plus, on rank 0 only, the KSP/PC of the
      first local block;
    - PETSC_VIEWER_ASCII_INFO_DETAIL: per-rank block counts and the KSP of
      every local block.

  Collective: the sub-viewer get/restore and the synchronized print/flush
  calls below are collective on the communicator of pc, so all ranks must
  execute them in the same order.
*/
static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
{
  PC_ASM           *osm = (PC_ASM *)pc->data;
  PetscMPIInt       rank;
  PetscInt          i, bsz;
  PetscBool         iascii, isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char       *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    /* fallback texts used when overlap/number of blocks have not been determined yet */
    char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
        /* collective: all ranks get the sub-viewer, but only rank 0 prints with it */
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(sviewer));
          PetscCall(KSPView(osm->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(sviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      }
    } else {
      /* detailed view: every rank reports its block count, then views every local block KSP */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i], &bsz));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
        PetscCall(KSPView(osm->ksp[i], sviewer));
        PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    /* compact single-line summary for string viewers */
    PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
/*
  PCSetUp_ASM - Builds the data structures needed to apply the additive
  Schwarz preconditioner: the subdomain index sets, the extracted submatrices,
  the per-block KSP solvers, the local work vectors, and the scatters between
  the global vector, the union of local subdomains (lis/lx/ly), and each
  block's vectors.

  On the first call (pc->setupcalled false) everything is created; on
  subsequent calls the submatrices are either reused (MAT_REUSE_MATRIX) or
  rebuilt when the nonzero pattern changed or a submatrix type conversion
  forces a fresh extraction.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM       *osm = (PC_ASM *)pc->data;
  PetscBool     flg;
  PetscInt      i, m, m_local;
  MatReuse      scall = MAT_REUSE_MATRIX;
  IS            isl;
  KSP           ksp;
  PC            subpc;
  const char   *prefix, *pprefix;
  Vec           vec;
  DM           *domain_dm = NULL;
  MatNullSpace *nullsp    = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m; /* NOTE(review): shadows the outer m declared above; only this inner one is used in this scope */

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt num_domains, d;
        char   **domain_names;
        IS      *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        /* PCASMSetLocalSubdomains() took its own references; release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names) PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {
        PetscInt max, sum;
      } inwork, outwork;
      PetscMPIInt size;

      inwork.max = osm->n_local_true;
      inwork.sum = osm->n_local_true;
      PetscCallMPI(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
      for (i = 0; i < osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
        } else {
          /* no overlap: is_local[i] can share the same IS as is[i] */
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
    }
    if (osm->sort_indices) {
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
      if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
        PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
        PetscCall(KSPSetType(ksp, KSPPREONLY));
        PetscCall(KSPGetPC(ksp, &subpc));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(KSPSetOptionsPrefix(ksp, prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
        if (domain_dm) {
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) PetscCall(PetscFree(domain_dm));
    }

    /* lis = sorted, duplicate-free union of all local subdomains */
    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* preserve the null spaces so they can be re-attached to the new submatrices */
      PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
      PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
    PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
    for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
    if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
  }

  /* Convert the types of the submatrices (if need be) */
  if (osm->sub_mat_type) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));

    PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
    if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));

    /* lx/ly are sequential work vectors of the size of the union lis; the
       "restriction" scatter maps the global vector onto them */
    PetscCall(ISGetLocalSize(osm->lis, &m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
    PetscCall(MatGetVecType(osm->pmat[0], &vtype));
    PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
    PetscCall(VecSetSizes(osm->lx, m, m));
    PetscCall(VecSetType(osm->lx, vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i = 0; i < osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt        *idx_is;
      PetscInt              *idx_lis, nout;

      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
      PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m, &idx_lis));
      PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
      PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
      PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll, isll_local;
        const PetscInt        *idx_local;
        PetscInt              *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        /* idx1 = positions of the non-overlapping entries inside block i */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
        PetscCall(PetscMalloc1(m_local, &idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));

        /* idx2 = positions of the same entries inside the union lis */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
        PetscCall(PetscMalloc1(m_local, &idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] couples block i's rows with the whole union lis, used to update
       the next block's RHS with the current local solution in PCApply_ASM() */
    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
  for (i = 0; i < osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
    if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
412 
413 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
414 {
415   PC_ASM            *osm = (PC_ASM *)pc->data;
416   PetscInt           i;
417   KSPConvergedReason reason;
418 
419   PetscFunctionBegin;
420   for (i = 0; i < osm->n_local_true; i++) {
421     PetscCall(KSPSetUp(osm->ksp[i]));
422     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
423     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
424   }
425   PetscFunctionReturn(PETSC_SUCCESS);
426 }
427 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

  The global RHS is scattered onto the union of local subdomains (lx), each
  block RHS is extracted, solved with its KSP, and the block solutions are
  accumulated into the local solution ly, which is finally scattered back
  into the global solution y with ADD_VALUES.

  PC_ASM_RESTRICT/PC_ASM_INTERPOLATE control whether the forward (restriction)
  and reverse (interpolation) scatters exchange ghost values or act only on
  locally owned entries. With PC_COMPOSITE_MULTIPLICATIVE local composition,
  each subsequent block's RHS is updated with the solution accumulated so far.
*/
static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
        /* update the overlapping (i+1)-block RHS using the current local solution:
           x[i+1] <- x[i+1] - lmats[i+1] * ly */
        PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
        PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
      }
    }
  }
  /* add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
492 
/*
  PCMatApply_ASM - Applies the ASM preconditioner to multiple right-hand sides
  at once: Y = M^{-1} X, where X and Y are dense matrices whose columns are
  the individual right-hand sides/solutions.

  Only a single local block is supported (n_local_true <= 1). Each column of X
  is restricted to the 0-block RHS (collected in the dense matrix Z), all
  columns are solved together with KSPMatSolve() into W, and each column of W
  is prolonged/accumulated back into the corresponding column of Y.
*/
static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  Mat         Z, W;
  Vec         x;
  PetscInt    i, m, N;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
  /* Z is m x N: one column per RHS, sized like the 0-block vector */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(MatZeroEntries(Y));
  PetscCall(VecSet(osm->ly, 0.0));

  for (i = 0; i < N; ++i) {
    PetscCall(MatDenseGetColumnVecRead(X, i, &x));
    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
  }
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
  /* solve the overlapping 0-block for all right-hand sides at once */
  PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
  PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
  PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
  PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
  PetscCall(MatDestroy(&Z));

  for (i = 0; i < N; ++i) {
    /* ly is reused per column, so it must be re-zeroed each iteration */
    PetscCall(VecSet(osm->ly, 0.0));
    PetscCall(MatDenseGetColumnVecRead(W, i, &x));
    if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping 0-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
    }
    PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
  }
  PetscCall(MatDestroy(&W));
  PetscFunctionReturn(PETSC_SUCCESS);
}
564 
/*
  PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
  preconditioner: y = M^{-T} x.

  Mirrors PCApply_ASM() but uses KSPSolveTranspose() for the block solves,
  and swaps the roles of PC_ASM_RESTRICT/PC_ASM_INTERPOLATE in the scatter
  mode selection, since the transpose of restriction is interpolation and
  vice versa.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
626 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* Reset (do not destroy) the subdomain solvers so that their options and type survive a re-setup */
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
  }
  /* Destroy the extracted subdomain matrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
  }
  /* osm->lrestriction doubles as the flag that the scatters and per-block work vectors
     created during PCSetUp_ASM() exist; free them all together */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i = 0; i < osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      /* lprolongation is only allocated for some PCASMType settings, so it may be NULL */
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));
  }
  /* Destroy the subdomain index sets and the local (ghosted) index set and work vectors */
  PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* lmats are only created for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
664 
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* Release all setup-time objects first, then the subdomain solvers and the data struct itself */
  PetscCall(PCReset_ASM(pc));
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
    PetscCall(PetscFree(osm->ksp));
  }
  PetscCall(PetscFree(pc->data));

  /* Remove the composed type-specific methods so a subsequent PCSetType() starts clean */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
691 
692 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
693 {
694   PC_ASM         *osm = (PC_ASM *)pc->data;
695   PetscInt        blocks, ovl;
696   PetscBool       flg;
697   PCASMType       asmtype;
698   PCCompositeType loctype;
699   char            sub_mat_type[256];
700 
701   PetscFunctionBegin;
702   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
703   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
704   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
705   if (flg) {
706     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
707     osm->dm_subdomains = PETSC_FALSE;
708   }
709   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
710   if (flg) {
711     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
712     osm->dm_subdomains = PETSC_FALSE;
713   }
714   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
715   if (flg) {
716     PetscCall(PCASMSetOverlap(pc, ovl));
717     osm->dm_subdomains = PETSC_FALSE;
718   }
719   flg = PETSC_FALSE;
720   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
721   if (flg) PetscCall(PCASMSetType(pc, asmtype));
722   flg = PETSC_FALSE;
723   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
724   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
725   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
726   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
727   PetscOptionsHeadEnd();
728   PetscFunctionReturn(PETSC_SUCCESS);
729 }
730 
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the stored ones, in case
       the caller passes (some of) the same IS objects that are already stored */
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    /* If the number of blocks changed, any previously created subdomain solvers are stale */
    if (osm->ksp && osm->n_local_true != n) {
      for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
      PetscCall(PetscFree(osm->ksp));
    }

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local was supplied: derive the overlapping sets osm->is from it */
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            /* deep copy so that extending osm->is[i] later does not corrupt osm->is_local[i] */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            /* zero overlap: the two sets are identical, share the object via a reference */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
782 
static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscMPIInt rank, size;
  PetscInt    n;

  PetscFunctionBegin;
  PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
  PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets, they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  /* Each rank gets floor(N/size) blocks; the first N % size ranks get one extra */
  n = N / size + ((N % size) > rank);
  PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
  PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    /* Discard any previously set subdomains; the actual index sets are created during PCSetUp() */
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
810 
/* Stores the requested overlap; must be called before PCSetUp() has run */
static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
  PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(PETSC_SUCCESS);
}
821 
/* Stores the restriction/interpolation variant; type_set records an explicit user choice */
static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
831 
/* Returns the currently selected restriction/interpolation variant */
static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
840 
/* Stores the local composition type; only additive and multiplicative are implemented */
static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
850 
/* Returns the currently selected local composition type */
static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
859 
/* Records whether subdomain index sets should be sorted during setup */
static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(PETSC_SUCCESS);
}
868 
/* Returns the per-block KSP solvers; collective only when first_local is requested
   (it requires an MPI_Scan across the communicator) */
static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* inclusive prefix sum minus own count gives the exclusive prefix sum,
       i.e. the global index of this process's first block */
    PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  /* the returned array is owned by the PC; the caller must not free it */
  if (ksp) *ksp = osm->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
884 
/* Returns the matrix type used for the subdomain solves (NULL if never set);
   the returned string is owned by the PC and must not be freed by the caller */
static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(sub_mat_type, 2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
895 
/* Stores a private copy of the requested subdomain matrix type, replacing any previous value */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* free the old string before duplicating the new one to avoid a leak on repeated calls */
  PetscCall(PetscFree(osm->sub_mat_type));
  PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
906 
907 /*@
908   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
909 
910   Collective
911 
912   Input Parameters:
913 + pc       - the preconditioner context
914 . n        - the number of subdomains for this processor (default value = 1)
915 . is       - the index set that defines the subdomains for this processor (or `NULL` for PETSc to determine subdomains)
916              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
917 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
918              (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array
919              (not the `IS` in the array) after this call
920 
921   Options Database Key:
922 . -pc_asm_local_blocks <blks> - Sets number of local blocks
923 
924   Level: advanced
925 
926   Notes:
927   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`
928 
929   By default the `PCASM` preconditioner uses 1 block per processor.
930 
931   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
932 
933   If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
934   back to form the global solution (this is the standard restricted additive Schwarz method, RASM)
935 
936   If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
937   no code to handle that case.
938 
939 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
940           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
941 @*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod() dispatches to PCASMSetLocalSubdomains_ASM() when pc is a PCASM;
     the call is silently ignored for other PC types */
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
949 
950 /*@
951   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
952   additive Schwarz preconditioner, `PCASM`.
953 
954   Collective, all MPI ranks must pass in the same array of `IS`
955 
956   Input Parameters:
957 + pc       - the preconditioner context
958 . N        - the number of subdomains for all processors
959 . is       - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains)
960              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
961 - is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information)
962              The values of the `is_local` array are copied so you can free the array (not the `IS` in the array) after this call
963 
964   Options Database Key:
965 . -pc_asm_blocks <blks> - Sets total blocks
966 
967   Level: advanced
968 
969   Notes:
970   Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`.
971 
972   By default the `PCASM` preconditioner uses 1 block per processor.
973 
974   These index sets cannot be destroyed until after completion of the
975   linear solves for which the `PCASM` preconditioner is being used.
976 
977   Use `PCASMSetLocalSubdomains()` to set local subdomains.
978 
  The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`
980 
981 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
982           `PCASMCreateSubdomains2D()`, `PCGASM`
983 @*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod() dispatches to PCASMSetTotalSubdomains_ASM() when pc is a PCASM;
     the call is silently ignored for other PC types */
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
991 
992 /*@
993   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
994   additive Schwarz preconditioner, `PCASM`.
995 
996   Logically Collective
997 
998   Input Parameters:
999 + pc  - the preconditioner context
1000 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1001 
1002   Options Database Key:
1003 . -pc_asm_overlap <ovl> - Sets overlap
1004 
1005   Level: intermediate
1006 
1007   Notes:
1008   By default the `PCASM` preconditioner uses 1 block per processor.  To use
  multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1010   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1011 
1012   The overlap defaults to 1, so if one desires that no additional
1013   overlap be computed beyond what may have been set with a call to
1014   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1015   must be set to be 0.  In particular, if one does not explicitly set
  the subdomains in an application code, then all overlap would be computed
1017   internally by PETSc, and using an overlap of 0 would result in an `PCASM`
1018   variant that is equivalent to the block Jacobi preconditioner.
1019 
1020   The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1021   use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1022 
1023   One can define initial index sets with any overlap via
1024   `PCASMSetLocalSubdomains()`; the routine
1025   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1026   if desired.
1027 
1028 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1029           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1030 @*/
PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, ovl, 2);
  /* PetscTryMethod() dispatches to PCASMSetOverlap_ASM() when pc is a PCASM;
     the call is silently ignored for other PC types */
  PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1039 
1040 /*@
1041   PCASMSetType - Sets the type of restriction and interpolation used
1042   for local problems in the additive Schwarz method, `PCASM`.
1043 
1044   Logically Collective
1045 
1046   Input Parameters:
1047 + pc   - the preconditioner context
1048 - type - variant of `PCASM`, one of
1049 .vb
1050       PC_ASM_BASIC       - full interpolation and restriction
1051       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1052       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1053       PC_ASM_NONE        - local processor restriction and interpolation
1054 .ve
1055 
1056   Options Database Key:
1057 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1058 
1059   Level: intermediate
1060 
1061   Note:
1062   if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1063   to limit the local processor interpolation
1064 
1065 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1066           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1067 @*/
PetscErrorCode PCASMSetType(PC pc, PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* PetscTryMethod() dispatches to PCASMSetType_ASM() when pc is a PCASM;
     the call is silently ignored for other PC types */
  PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1076 
1077 /*@
1078   PCASMGetType - Gets the type of restriction and interpolation used
1079   for local problems in the additive Schwarz method, `PCASM`.
1080 
1081   Logically Collective
1082 
1083   Input Parameter:
1084 . pc - the preconditioner context
1085 
1086   Output Parameter:
1087 . type - variant of `PCASM`, one of
1088 .vb
1089       PC_ASM_BASIC       - full interpolation and restriction
1090       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1091       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1092       PC_ASM_NONE        - local processor restriction and interpolation
1093 .ve
1094 
1095   Options Database Key:
1096 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1097 
1098   Level: intermediate
1099 
1100 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1101           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1102 @*/
PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod() dispatches to PCASMGetType_ASM(); unlike the setters, it
     errors if pc is not a PCASM (a getter cannot be meaningfully ignored) */
  PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1110 
1111 /*@
1112   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1113 
1114   Logically Collective
1115 
1116   Input Parameters:
1117 + pc   - the preconditioner context
1118 - type - type of composition, one of
1119 .vb
1120   PC_COMPOSITE_ADDITIVE       - local additive combination
1121   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1122 .ve
1123 
1124   Options Database Key:
1125 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1126 
1127   Level: intermediate
1128 
1129 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1130 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* PetscTryMethod() dispatches to PCASMSetLocalType_ASM() when pc is a PCASM;
     the call is silently ignored for other PC types */
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1139 
1140 /*@
1141   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1142 
1143   Logically Collective
1144 
1145   Input Parameter:
1146 . pc - the preconditioner context
1147 
1148   Output Parameter:
1149 . type - type of composition, one of
1150 .vb
1151   PC_COMPOSITE_ADDITIVE       - local additive combination
1152   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1153 .ve
1154 
1155   Options Database Key:
1156 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1157 
1158   Level: intermediate
1159 
1160 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMType`, `PCCompositeType`
1161 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* PetscUseMethod() dispatches to PCASMGetLocalType_ASM(); it errors if pc is not a PCASM */
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1170 
1171 /*@
1172   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1173 
1174   Logically Collective
1175 
1176   Input Parameters:
1177 + pc     - the preconditioner context
1178 - doSort - sort the subdomain indices
1179 
1180   Level: intermediate
1181 
1182 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1183           `PCASMCreateSubdomains2D()`
1184 @*/
PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, doSort, 2);
  /* PetscTryMethod() dispatches to PCASMSetSortIndices_ASM() when pc is a PCASM;
     the call is silently ignored for other PC types */
  PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1193 
1194 /*@C
1195   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1196   this processor.
1197 
1198   Collective iff first_local is requested
1199 
1200   Input Parameter:
1201 . pc - the preconditioner context
1202 
1203   Output Parameters:
1204 + n_local     - the number of blocks on this processor or `NULL`
1205 . first_local - the global number of the first block on this processor or `NULL`, all processors must request or all must pass `NULL`
1206 - ksp         - the array of `KSP` contexts
1207 
1208   Level: advanced
1209 
1210   Notes:
1211   After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.
1212 
1213   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1214 
1215   Fortran Notes:
1216   The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1217 
1218 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1219           `PCASMCreateSubdomains2D()`,
1220 @*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod() dispatches to PCASMGetSubKSP_ASM(); it errors if pc is not a PCASM */
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1228 
1229 /*MC
1230    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1231            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`
1232 
1233    Options Database Keys:
1234 +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1235 .  -pc_asm_overlap <ovl>                          - Sets overlap
1236 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1237 .  -pc_asm_dm_subdomains <bool>                   - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1238 -  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1239 
1240    Level: beginner
1241 
1242    Notes:
1243    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1244    will get a different convergence rate due to the default option of `-pc_asm_type restrict`. Use
1245    `-pc_asm_type basic` to get the same convergence behavior
1246 
1247    Each processor can have one or more blocks, but a block cannot be shared by more
1248    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1249 
1250    To set options on the solvers for each block append `-sub_` to all the `KSP`, and `PC`
1251    options database keys. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`
1252 
1253    To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
1254    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1255 
1256    If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains
1257 
1258 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1259           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1260           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1261 M*/
1262 
/* Constructor for PCASM: allocates the PC_ASM context, fills in the defaults,
   wires the PC operations table, and composes the type-specific methods that
   the public PCASMXxx() wrappers dispatch to */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscCall(PetscNew(&osm));

  /* Defaults: block counts decided during PCSetUp(), overlap of 1,
     restricted additive Schwarz (RASM) with additive local composition */
  osm->n             = PETSC_DECIDE;
  osm->n_local       = 0;
  osm->n_local_true  = PETSC_DECIDE;
  osm->overlap       = 1;
  osm->ksp           = NULL;
  osm->restriction   = NULL;
  osm->lprolongation = NULL;
  osm->lrestriction  = NULL;
  osm->x             = NULL;
  osm->y             = NULL;
  osm->is            = NULL;
  osm->is_local      = NULL;
  osm->mat           = NULL;
  osm->pmat          = NULL;
  osm->type          = PC_ASM_RESTRICT;
  osm->loctype       = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices  = PETSC_TRUE;
  osm->dm_subdomains = PETSC_FALSE;
  osm->sub_mat_type  = NULL;

  /* Hook up the virtual methods implemented in this file */
  pc->data                 = (void *)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* Compose the type-specific implementations used by the public API wrappers */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1315 
/*@C
  PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
  preconditioner, `PCASM`,  for any problem on a general grid.

  Collective

  Input Parameters:
+ A - The global matrix operator
- n - the number of local blocks

  Output Parameter:
. outis - the array of index sets defining the subdomains

  Level: advanced

  Note:
  This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
  from these if you use `PCASMSetLocalSubdomains()`

  Fortran Notes:
  You must provide the array `outis` already allocated of length `n`.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
@*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscAssertPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  /* the local row range must be aligned with the block size, otherwise subdomains would split blocks */
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    /* detect (S)BAIJ so the block structure can be respected in the partitioning below */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      /* request block-compressed row storage for (S)BAIJ so whole blocks stay together */
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix;
           MatCreateMPIAdj() takes ownership of iia and jja, so they are freed with adj, not here */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* ownership of this array (and the index sets in it) passes to the caller,
     who frees it with PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs;
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      /* spread the mbs % n leftover blocks over the first subdomains */
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb, &numbering));
    /* sort the identity permutation by partition number: indices ends up grouped by subdomain */
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs constituent point indices */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1486 
1487 /*@C
1488   PCASMDestroySubdomains - Destroys the index sets created with
1489   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1490 
1491   Collective
1492 
1493   Input Parameters:
1494 + n        - the number of index sets
1495 . is       - the array of index sets
1496 - is_local - the array of local index sets, can be `NULL`
1497 
1498   Level: advanced
1499 
1500 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1501 @*/
1502 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1503 {
1504   PetscInt i;
1505 
1506   PetscFunctionBegin;
1507   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1508   if (is) {
1509     PetscAssertPointer(is, 2);
1510     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1511     PetscCall(PetscFree(is));
1512   }
1513   if (is_local) {
1514     PetscAssertPointer(is_local, 3);
1515     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1516     PetscCall(PetscFree(is_local));
1517   }
1518   PetscFunctionReturn(PETSC_SUCCESS);
1519 }
1520 
/*@C
  PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
  preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

  Not Collective

  Input Parameters:
+ m       - the number of mesh points in the x direction
. n       - the number of mesh points in the y direction
. M       - the number of subdomains in the x direction
. N       - the number of subdomains in the y direction
. dof     - degrees of freedom per node
- overlap - overlap in mesh lines

  Output Parameters:
+ Nsub     - the number of subdomains created
. is       - array of index sets defining overlapping (if overlap > 0) subdomains
- is_local - array of index sets defining non-overlapping subdomains

  Level: advanced

  Note:
  Presently `PCASMCreateSubdomains2D()` is valid only for sequential
  preconditioners.  More general related routines are
  `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.

  Fortran Notes:
  `is` must be declared as an array of length long enough to hold `Nsub` entries

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMSetOverlap()`
@*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0; /* running index of the subdomain being built, row-major over the (N x M) subdomain grid */
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    /* extend the subdomain by `overlap` mesh lines in y, clipped to the mesh boundary */
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      /* extend the subdomain by `overlap` mesh lines in x, clipped to the mesh boundary */
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      /* gather the natural-ordering indices of the overlapping rectangle [xleft,xright) x [yleft,yright) */
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        /* no overlap: the local set is the same IS, shared via an extra reference */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx (the non-overlapping rectangle has at most nidx entries) for the local set */
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1608 
1609 /*@C
1610   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1611   only) for the additive Schwarz preconditioner, `PCASM`.
1612 
1613   Not Collective
1614 
1615   Input Parameter:
1616 . pc - the preconditioner context
1617 
1618   Output Parameters:
1619 + n        - if requested, the number of subdomains for this processor (default value = 1)
1620 . is       - if requested, the index sets that define the subdomains for this processor
1621 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1622 
1623   Level: advanced
1624 
1625   Note:
1626   The `IS` numbering is in the parallel, global numbering of the vector.
1627 
1628   Fortran Note:
1629   Pass in for `is` and `is_local` arrays long enough to hold all the subdomains
1630 
1631 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1632           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1633 @*/
1634 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1635 {
1636   PC_ASM   *osm = (PC_ASM *)pc->data;
1637   PetscBool match;
1638 
1639   PetscFunctionBegin;
1640   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1641   if (n) PetscAssertPointer(n, 2);
1642   if (is) PetscAssertPointer(is, 3);
1643   if (is_local) PetscAssertPointer(is_local, 4);
1644   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1645   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1646   if (n) *n = osm->n_local_true;
1647   if (is) *is = osm->is;
1648   if (is_local) *is_local = osm->is_local;
1649   PetscFunctionReturn(PETSC_SUCCESS);
1650 }
1651 
1652 /*@C
1653   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1654   only) for the additive Schwarz preconditioner, `PCASM`.
1655 
1656   Not Collective
1657 
1658   Input Parameter:
1659 . pc - the preconditioner context
1660 
1661   Output Parameters:
1662 + n   - if requested, the number of matrices for this processor (default value = 1)
1663 - mat - if requested, the matrices
1664 
1665   Level: advanced
1666 
1667   Notes:
1668   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`)
1669 
1670   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1671 
1672   Fortran Note:
1673   Pass in for `mat` an array long enough to hold all the matrices
1674 
1675 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1676           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1677 @*/
1678 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1679 {
1680   PC_ASM   *osm;
1681   PetscBool match;
1682 
1683   PetscFunctionBegin;
1684   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1685   if (n) PetscAssertPointer(n, 2);
1686   if (mat) PetscAssertPointer(mat, 3);
1687   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1688   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1689   if (!match) {
1690     if (n) *n = 0;
1691     if (mat) *mat = NULL;
1692   } else {
1693     osm = (PC_ASM *)pc->data;
1694     if (n) *n = osm->n_local_true;
1695     if (mat) *mat = osm->pmat;
1696   }
1697   PetscFunctionReturn(PETSC_SUCCESS);
1698 }
1699 
1700 /*@
1701   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1702 
1703   Logically Collective
1704 
1705   Input Parameters:
1706 + pc  - the preconditioner
1707 - flg - boolean indicating whether to use subdomains defined by the `DM`
1708 
1709   Options Database Key:
1710 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1711 
1712   Level: intermediate
1713 
1714   Note:
1715   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1716   so setting either of the first two effectively turns the latter off.
1717 
1718   Developer Note:
1719   This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key
1720 
1721 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1722           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1723 @*/
1724 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1725 {
1726   PC_ASM   *osm = (PC_ASM *)pc->data;
1727   PetscBool match;
1728 
1729   PetscFunctionBegin;
1730   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1731   PetscValidLogicalCollectiveBool(pc, flg, 2);
1732   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1733   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1734   if (match) osm->dm_subdomains = flg;
1735   PetscFunctionReturn(PETSC_SUCCESS);
1736 }
1737 
1738 /*@
1739   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1740 
1741   Not Collective
1742 
1743   Input Parameter:
1744 . pc - the preconditioner
1745 
1746   Output Parameter:
1747 . flg - boolean indicating whether to use subdomains defined by the `DM`
1748 
1749   Level: intermediate
1750 
1751   Developer Note:
1752   This should be `PCASMSetUseDMSubdomains()`
1753 
1754 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1755           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1756 @*/
1757 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1758 {
1759   PC_ASM   *osm = (PC_ASM *)pc->data;
1760   PetscBool match;
1761 
1762   PetscFunctionBegin;
1763   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1764   PetscAssertPointer(flg, 2);
1765   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1766   if (match) *flg = osm->dm_subdomains;
1767   else *flg = PETSC_FALSE;
1768   PetscFunctionReturn(PETSC_SUCCESS);
1769 }
1770 
1771 /*@
1772   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1773 
1774   Not Collective
1775 
1776   Input Parameter:
1777 . pc - the `PC`
1778 
1779   Output Parameter:
1780 . sub_mat_type - name of matrix type
1781 
1782   Level: advanced
1783 
1784 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1785 @*/
1786 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1787 {
1788   PetscFunctionBegin;
1789   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1790   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1791   PetscFunctionReturn(PETSC_SUCCESS);
1792 }
1793 
1794 /*@
1795   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1796 
1797   Collective
1798 
1799   Input Parameters:
1800 + pc           - the `PC` object
1801 - sub_mat_type - the `MatType`
1802 
1803   Options Database Key:
1804 . -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1805    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1806 
1807   Note:
1808   See `MatType` for available types
1809 
1810   Level: advanced
1811 
1812 .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1813 @*/
1814 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1815 {
1816   PetscFunctionBegin;
1817   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1818   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1819   PetscFunctionReturn(PETSC_SUCCESS);
1820 }
1821