xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 2ff79c18c26c94ed8cb599682f680f231dca6444)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
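/*
   An illustrative example of the bookkeeping above (numbers chosen only for illustration): with
   3 MPI processes owning 2, 1, and 3 subdomains respectively, n = 6, n_local_true = 2, 1, or 3
   on the corresponding process, and n_local = 3 on every process.
*/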
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include <petsc/private/matimpl.h>
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         isascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (isascii) {
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(sviewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(sviewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
69     }
70   } else if (isstring) {
71     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
72     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
73     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
74     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
75   }
76   PetscFunctionReturn(PETSC_SUCCESS);
77 }
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; allow up to 16 characters per index plus 512 characters for the header. */
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; allow up to 16 characters per index plus 512 characters for the header. */
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 static PetscErrorCode PCSetUp_ASM(PC pc)
154 {
155   PC_ASM       *osm = (PC_ASM *)pc->data;
156   PetscBool     flg;
157   PetscInt      i, m, m_local;
158   MatReuse      scall = MAT_REUSE_MATRIX;
159   IS            isl;
160   KSP           ksp;
161   PC            subpc;
162   const char   *prefix, *pprefix;
163   Vec           vec;
164   DM           *domain_dm = NULL;
165   MatNullSpace *nullsp    = NULL;
166 
167   PetscFunctionBegin;
168   if (!pc->setupcalled) {
169     PetscInt m;
170 
171     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172     if (osm->n_local_true == PETSC_DECIDE) {
173       /* no subdomains given */
174       /* try pc->dm first, if allowed */
175       if (osm->dm_subdomains && pc->dm) {
176         PetscInt num_domains, d;
177         char   **domain_names;
178         IS      *inner_domain_is, *outer_domain_is;
179         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181                               A future improvement of this code might allow one to use
182                               DM-defined subdomains and also increase the overlap,
183                               but that is not currently supported */
184         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185         for (d = 0; d < num_domains; ++d) {
186           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189         }
190         PetscCall(PetscFree(domain_names));
191         PetscCall(PetscFree(inner_domain_is));
192         PetscCall(PetscFree(outer_domain_is));
193       }
194       if (osm->n_local_true == PETSC_DECIDE) {
195         /* still no subdomains; use one subdomain per processor */
196         osm->n_local_true = 1;
197       }
198     }
199     { /* determine the global and max number of subdomains */
200       struct {
201         PetscInt max, sum;
202       } inwork, outwork;
203       PetscMPIInt size;
204 
205       inwork.max = osm->n_local_true;
206       inwork.sum = osm->n_local_true;
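      /* MPIU_MAXSUM_OP reduces a pair of PetscInt in a single call: the first entry is maximized
         and the second is summed across all processes, yielding n_local and n simultaneously */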
207       PetscCallMPI(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208       osm->n_local = outwork.max;
209       osm->n       = outwork.sum;
210 
211       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212       if (outwork.max == 1 && outwork.sum == size) {
213         /* osm->n_local_true == 1 on all processes; setting this option may enable an optimized MatCreateSubMatrices() implementation */
214         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215       }
216     }
217     if (!osm->is) { /* create the index sets */
218       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219     }
220     if (osm->n_local_true > 1 && !osm->is_local) {
221       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222       for (i = 0; i < osm->n_local_true; i++) {
223         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226         } else {
227           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228           osm->is_local[i] = osm->is[i];
229         }
230       }
231     }
232     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233     if (osm->overlap > 0) {
234       /* Extend the "overlapping" regions by a number of steps */
235       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236     }
237     if (osm->sort_indices) {
238       for (i = 0; i < osm->n_local_true; i++) {
239         PetscCall(ISSort(osm->is[i]));
240         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241       }
242     }
243     flg = PETSC_FALSE;
244     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246     if (!osm->ksp) {
247       /* Create the local solvers */
248       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250       for (i = 0; i < osm->n_local_true; i++) {
251         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
253         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
254         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
255         PetscCall(KSPSetType(ksp, KSPPREONLY));
256         PetscCall(KSPGetPC(ksp, &subpc));
257         PetscCall(PCGetOptionsPrefix(pc, &prefix));
258         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
259         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
260         if (domain_dm) {
261           PetscCall(KSPSetDM(ksp, domain_dm[i]));
262           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
263           PetscCall(DMDestroy(&domain_dm[i]));
264         }
265         osm->ksp[i] = ksp;
266       }
267       if (domain_dm) PetscCall(PetscFree(domain_dm));
268     }
269 
270     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
271     PetscCall(ISSortRemoveDups(osm->lis));
272     PetscCall(ISGetLocalSize(osm->lis, &m));
273 
274     scall = MAT_INITIAL_MATRIX;
275   } else {
276     /*
277        Destroy the blocks from the previous iteration
278     */
279     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
280       PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
281       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
282       scall = MAT_INITIAL_MATRIX;
283     }
284   }
285 
286   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
287   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
288     PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
289     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
290     scall = MAT_INITIAL_MATRIX;
291   }
292 
293   /*
294      Extract out the submatrices
295   */
296   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
297   if (scall == MAT_INITIAL_MATRIX) {
298     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
299     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
300     if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
301   }
302 
303   /* Convert the types of the submatrices (if need be) */
304   if (osm->sub_mat_type) {
305     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
306   }
307 
308   if (!pc->setupcalled) {
309     VecType vtype;
310 
311     /* Create the local work vectors (from the local matrices) and scatter contexts */
312     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
313 
314     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
315     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
316     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
317     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
318     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
319 
320     PetscCall(ISGetLocalSize(osm->lis, &m));
321     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
322     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
323     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
324     PetscCall(VecSetSizes(osm->lx, m, m));
325     PetscCall(VecSetType(osm->lx, vtype));
326     PetscCall(VecDuplicate(osm->lx, &osm->ly));
327     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
328     PetscCall(ISDestroy(&isl));
329 
330     for (i = 0; i < osm->n_local_true; ++i) {
331       ISLocalToGlobalMapping ltog;
332       IS                     isll;
333       const PetscInt        *idx_is;
334       PetscInt              *idx_lis, nout;
335 
336       PetscCall(ISGetLocalSize(osm->is[i], &m));
337       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
338       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
339 
340       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
341       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
342       PetscCall(ISGetLocalSize(osm->is[i], &m));
343       PetscCall(ISGetIndices(osm->is[i], &idx_is));
344       PetscCall(PetscMalloc1(m, &idx_lis));
345       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
346       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
347       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
348       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
349       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
350       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
351       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
352       PetscCall(ISDestroy(&isll));
353       PetscCall(ISDestroy(&isl));
354       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
355         ISLocalToGlobalMapping ltog;
356         IS                     isll, isll_local;
357         const PetscInt        *idx_local;
358         PetscInt              *idx1, *idx2, nout;
359 
360         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
361         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
362 
363         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
364         PetscCall(PetscMalloc1(m_local, &idx1));
365         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
366         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
367         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
368         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
369 
370         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
371         PetscCall(PetscMalloc1(m_local, &idx2));
372         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
373         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
374         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
375         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
376 
377         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
378         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
379 
380         PetscCall(ISDestroy(&isll));
381         PetscCall(ISDestroy(&isll_local));
382       }
383     }
384     PetscCall(VecDestroy(&vec));
385   }
386 
387   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
388     IS      *cis;
389     PetscInt c;
390 
391     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
392     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
393     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
394     PetscCall(PetscFree(cis));
395   }
396 
397   /* Return control to the user so that the submatrices can be modified (e.g., to apply
398      different boundary conditions for the submatrices than for the global problem) */
399   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
400 
401   /*
402      Loop over the subdomains, setting each submatrix as the operator of its local KSP
403   */
404   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
405   for (i = 0; i < osm->n_local_true; i++) {
406     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
407     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
408     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
409   }
410   PetscFunctionReturn(PETSC_SUCCESS);
411 }
412 
413 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
414 {
415   PC_ASM            *osm = (PC_ASM *)pc->data;
416   PetscInt           i;
417   KSPConvergedReason reason;
418 
419   PetscFunctionBegin;
420   for (i = 0; i < osm->n_local_true; i++) {
421     PetscCall(KSPSetUp(osm->ksp[i]));
422     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
423     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
424   }
425   PetscFunctionReturn(PETSC_SUCCESS);
426 }
427 
428 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
429 {
430   PC_ASM     *osm = (PC_ASM *)pc->data;
431   PetscInt    i, n_local_true = osm->n_local_true;
432   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
433 
434   PetscFunctionBegin;
435   /*
436      support for limiting the restriction or interpolation to only local
437      subdomain values (leaving the other values 0).
438   */
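  /*
     In matrix form (a sketch of the standard Schwarz formulas, not executed code): with R_i the
     restriction onto the overlapping subdomain i and A_i = R_i A R_i^T the submatrix solved by
     osm->ksp[i],
         basic additive Schwarz:      y = sum_i R_i^T    A_i^{-1} R_i x
         restricted additive Schwarz: y = sum_i Rtil_i^T A_i^{-1} R_i x
     where Rtil_i restricts onto the non-overlapping part of subdomain i (osm->is_local[i]).
  */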
439   if (!(osm->type & PC_ASM_RESTRICT)) {
440     forward = SCATTER_FORWARD_LOCAL;
441     /* have to zero the work RHS since scatter may leave some slots empty */
442     PetscCall(VecSet(osm->lx, 0.0));
443   }
444   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
445 
446   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
447   /* zero the global and the local solutions */
448   PetscCall(VecSet(y, 0.0));
449   PetscCall(VecSet(osm->ly, 0.0));
450 
451   /* copy the global RHS to local RHS including the ghost nodes */
452   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
453   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
454 
455   /* restrict local RHS to the overlapping 0-block RHS */
456   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
457   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
458 
459   /* do the local solves */
460   for (i = 0; i < n_local_true; ++i) {
461     /* solve the overlapping i-block */
462     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
463     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
464     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
465     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
466 
467     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restricted additive) */
468       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
469       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
470     } else { /* interpolate the overlapping i-block solution to the local solution */
471       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
472       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
473     }
474 
475     if (i < n_local_true - 1) {
476       /* restrict local RHS to the overlapping (i+1)-block RHS */
477       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
478       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
479 
480       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
481         /* update the overlapping (i+1)-block RHS using the current local solution */
482         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
483         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
484       }
485     }
486   }
487   /* add the local solution to the global solution including the ghost nodes */
488   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
489   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
490   PetscFunctionReturn(PETSC_SUCCESS);
491 }
492 
493 static PetscErrorCode PCMatApply_ASM_Private(PC pc, Mat X, Mat Y, PetscBool transpose)
494 {
495   PC_ASM     *osm = (PC_ASM *)pc->data;
496   Mat         Z, W;
497   Vec         x;
498   PetscInt    i, m, N;
499   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
500 
501   PetscFunctionBegin;
502   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
503   /*
504      support for limiting the restriction or interpolation to only local
505      subdomain values (leaving the other values 0).
506   */
507   if ((!transpose && !(osm->type & PC_ASM_RESTRICT)) || (transpose && !(osm->type & PC_ASM_INTERPOLATE))) {
508     forward = SCATTER_FORWARD_LOCAL;
509     /* have to zero the work RHS since scatter may leave some slots empty */
510     PetscCall(VecSet(osm->lx, 0.0));
511   }
512   if ((!transpose && !(osm->type & PC_ASM_INTERPOLATE)) || (transpose && !(osm->type & PC_ASM_RESTRICT))) reverse = SCATTER_REVERSE_LOCAL;
513   PetscCall(VecGetLocalSize(osm->x[0], &m));
514   PetscCall(MatGetSize(X, NULL, &N));
515   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
516 
517   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
518   /* zero the global and the local solutions */
519   PetscCall(MatZeroEntries(Y));
520   PetscCall(VecSet(osm->ly, 0.0));
521 
522   for (i = 0; i < N; ++i) {
523     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
524     /* copy the global RHS to local RHS including the ghost nodes */
525     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
526     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
527     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
528 
529     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
530     /* restrict local RHS to the overlapping 0-block RHS */
531     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
532     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
533     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
534   }
535   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
536   /* solve the overlapping 0-block */
537   if (!transpose) {
538     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
539     PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
540     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
541   } else {
542     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0));
543     PetscCall(KSPMatSolveTranspose(osm->ksp[0], Z, W));
544     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0));
545   }
546   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
547   PetscCall(MatDestroy(&Z));
548 
549   for (i = 0; i < N; ++i) {
550     PetscCall(VecSet(osm->ly, 0.0));
551     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
552     if (osm->lprolongation && ((!transpose && osm->type != PC_ASM_INTERPOLATE) || (transpose && osm->type != PC_ASM_RESTRICT))) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restricted additive) */
553       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
554       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
555     } else { /* interpolate the overlapping 0-block solution to the local solution */
556       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
557       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
558     }
559     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
560 
561     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
562     /* add the local solution to the global solution including the ghost nodes */
563     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
564     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
565     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
566   }
567   PetscCall(MatDestroy(&W));
568   PetscFunctionReturn(PETSC_SUCCESS);
569 }
570 
571 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
572 {
573   PetscFunctionBegin;
574   PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_FALSE));
575   PetscFunctionReturn(PETSC_SUCCESS);
576 }
577 
578 static PetscErrorCode PCMatApplyTranspose_ASM(PC pc, Mat X, Mat Y)
579 {
580   PetscFunctionBegin;
581   PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_TRUE));
582   PetscFunctionReturn(PETSC_SUCCESS);
583 }
584 
585 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
586 {
587   PC_ASM     *osm = (PC_ASM *)pc->data;
588   PetscInt    i, n_local_true = osm->n_local_true;
589   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
590 
591   PetscFunctionBegin;
592   PetscCheck(osm->n_local_true <= 1 || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
593   /*
594      Support for limiting the restriction or interpolation to only local
595      subdomain values (leaving the other values 0).
596 
597      Note: these are reversed from the PCApply_ASM() because we are applying the
598      transpose of the three terms
599   */
600 
601   if (!(osm->type & PC_ASM_INTERPOLATE)) {
602     forward = SCATTER_FORWARD_LOCAL;
603     /* have to zero the work RHS since scatter may leave some slots empty */
604     PetscCall(VecSet(osm->lx, 0.0));
605   }
606   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
607 
608   /* zero the global and the local solutions */
609   PetscCall(VecSet(y, 0.0));
610   PetscCall(VecSet(osm->ly, 0.0));
611 
612   /* Copy the global RHS to local RHS including the ghost nodes */
613   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
614   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
615 
616   /* Restrict local RHS to the overlapping 0-block RHS */
617   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
618   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
619 
620   /* do the local solves */
621   for (i = 0; i < n_local_true; ++i) {
622     /* solve the overlapping i-block */
623     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
624     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
625     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
626     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
627 
628     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
629       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
630       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
631     } else { /* interpolate the overlapping i-block solution to the local solution */
632       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
633       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
634     }
635 
636     if (i < n_local_true - 1) {
637       /* Restrict local RHS to the overlapping (i+1)-block RHS */
638       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
639       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
640     }
641   }
642   /* Add the local solution to the global solution including the ghost nodes */
643   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
644   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
645   PetscFunctionReturn(PETSC_SUCCESS);
646 }
647 
648 static PetscErrorCode PCReset_ASM(PC pc)
649 {
650   PC_ASM  *osm = (PC_ASM *)pc->data;
651   PetscInt i;
652 
653   PetscFunctionBegin;
654   if (osm->ksp) {
655     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
656   }
657   if (osm->pmat) {
658     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
659   }
660   if (osm->lrestriction) {
661     PetscCall(VecScatterDestroy(&osm->restriction));
662     for (i = 0; i < osm->n_local_true; i++) {
663       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
664       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
665       PetscCall(VecDestroy(&osm->x[i]));
666       PetscCall(VecDestroy(&osm->y[i]));
667     }
668     PetscCall(PetscFree(osm->lrestriction));
669     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
670     PetscCall(PetscFree(osm->x));
671     PetscCall(PetscFree(osm->y));
672   }
673   PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
674   PetscCall(ISDestroy(&osm->lis));
675   PetscCall(VecDestroy(&osm->lx));
676   PetscCall(VecDestroy(&osm->ly));
677   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
678 
679   PetscCall(PetscFree(osm->sub_mat_type));
680 
681   osm->is       = NULL;
682   osm->is_local = NULL;
683   PetscFunctionReturn(PETSC_SUCCESS);
684 }
685 
686 static PetscErrorCode PCDestroy_ASM(PC pc)
687 {
688   PC_ASM  *osm = (PC_ASM *)pc->data;
689   PetscInt i;
690 
691   PetscFunctionBegin;
692   PetscCall(PCReset_ASM(pc));
693   if (osm->ksp) {
694     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
695     PetscCall(PetscFree(osm->ksp));
696   }
697   PetscCall(PetscFree(pc->data));
698 
699   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
700   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
701   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
702   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
703   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
704   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
705   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
706   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
707   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
708   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
709   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
710   PetscFunctionReturn(PETSC_SUCCESS);
711 }
712 
713 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems PetscOptionsObject)
714 {
715   PC_ASM         *osm = (PC_ASM *)pc->data;
716   PetscInt        blocks, ovl;
717   PetscBool       flg;
718   PCASMType       asmtype;
719   PCCompositeType loctype;
720   char            sub_mat_type[256];
721 
722   PetscFunctionBegin;
723   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
724   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
725   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
726   if (flg) {
727     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
728     osm->dm_subdomains = PETSC_FALSE;
729   }
730   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
731   if (flg) {
732     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
733     osm->dm_subdomains = PETSC_FALSE;
734   }
735   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
736   if (flg) {
737     PetscCall(PCASMSetOverlap(pc, ovl));
738     osm->dm_subdomains = PETSC_FALSE;
739   }
740   flg = PETSC_FALSE;
741   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
742   if (flg) PetscCall(PCASMSetType(pc, asmtype));
743   flg = PETSC_FALSE;
744   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
745   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
746   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
747   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
748   PetscOptionsHeadEnd();
749   PetscFunctionReturn(PETSC_SUCCESS);
750 }
751 
752 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
753 {
754   PC_ASM  *osm = (PC_ASM *)pc->data;
755   PetscInt i;
756 
757   PetscFunctionBegin;
758   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
759   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
760 
761   if (!pc->setupcalled) {
762     if (is) {
763       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
764     }
765     if (is_local) {
766       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
767     }
768     PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
769 
770     if (osm->ksp && osm->n_local_true != n) {
771       for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
772       PetscCall(PetscFree(osm->ksp));
773     }
774 
775     osm->n_local_true = n;
776     osm->is           = NULL;
777     osm->is_local     = NULL;
778     if (is) {
779       PetscCall(PetscMalloc1(n, &osm->is));
780       for (i = 0; i < n; i++) osm->is[i] = is[i];
781       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
782       osm->overlap = -1;
783     }
784     if (is_local) {
785       PetscCall(PetscMalloc1(n, &osm->is_local));
786       for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
787       if (!is) {
788         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
789         for (i = 0; i < osm->n_local_true; i++) {
790           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
791             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
792             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
793           } else {
794             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
795             osm->is[i] = osm->is_local[i];
796           }
797         }
798       }
799     }
800   }
801   PetscFunctionReturn(PETSC_SUCCESS);
802 }
803 
804 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
805 {
806   PC_ASM     *osm = (PC_ASM *)pc->data;
807   PetscMPIInt rank, size;
808   PetscInt    n;
809 
810   PetscFunctionBegin;
811   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
812   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets; they cannot be set globally yet.");
813 
814   /*
815      Split the subdomains equally among all processors
816   */
817   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
818   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
819   n = N / size + ((N % size) > rank);
820   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, rank, size, N);
821   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
822   if (!pc->setupcalled) {
823     PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
824 
825     osm->n_local_true = n;
826     osm->is           = NULL;
827     osm->is_local     = NULL;
828   }
829   PetscFunctionReturn(PETSC_SUCCESS);
830 }
831 
832 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
833 {
834   PC_ASM *osm = (PC_ASM *)pc->data;
835 
836   PetscFunctionBegin;
837   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
838   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
839   if (!pc->setupcalled) osm->overlap = ovl;
840   PetscFunctionReturn(PETSC_SUCCESS);
841 }
842 
843 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
844 {
845   PC_ASM *osm = (PC_ASM *)pc->data;
846 
847   PetscFunctionBegin;
848   osm->type     = type;
849   osm->type_set = PETSC_TRUE;
850   PetscFunctionReturn(PETSC_SUCCESS);
851 }
852 
853 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
854 {
855   PC_ASM *osm = (PC_ASM *)pc->data;
856 
857   PetscFunctionBegin;
858   *type = osm->type;
859   PetscFunctionReturn(PETSC_SUCCESS);
860 }
861 
862 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
863 {
864   PC_ASM *osm = (PC_ASM *)pc->data;
865 
866   PetscFunctionBegin;
867   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
868   osm->loctype = type;
869   PetscFunctionReturn(PETSC_SUCCESS);
870 }
871 
872 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
873 {
874   PC_ASM *osm = (PC_ASM *)pc->data;
875 
876   PetscFunctionBegin;
877   *type = osm->loctype;
878   PetscFunctionReturn(PETSC_SUCCESS);
879 }
880 
881 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
882 {
883   PC_ASM *osm = (PC_ASM *)pc->data;
884 
885   PetscFunctionBegin;
886   osm->sort_indices = doSort;
887   PetscFunctionReturn(PETSC_SUCCESS);
888 }
889 
890 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
891 {
892   PC_ASM *osm = (PC_ASM *)pc->data;
893 
894   PetscFunctionBegin;
895   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
896 
897   if (n_local) *n_local = osm->n_local_true;
898   if (first_local) {
899     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
900     *first_local -= osm->n_local_true;
901   }
902   if (ksp) *ksp = osm->ksp;
903   PetscFunctionReturn(PETSC_SUCCESS);
904 }
905 
906 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
907 {
908   PC_ASM *osm = (PC_ASM *)pc->data;
909 
910   PetscFunctionBegin;
911   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
912   PetscAssertPointer(sub_mat_type, 2);
913   *sub_mat_type = osm->sub_mat_type;
914   PetscFunctionReturn(PETSC_SUCCESS);
915 }
916 
917 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
918 {
919   PC_ASM *osm = (PC_ASM *)pc->data;
920 
921   PetscFunctionBegin;
922   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
923   PetscCall(PetscFree(osm->sub_mat_type));
924   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
925   PetscFunctionReturn(PETSC_SUCCESS);
926 }
927 
928 /*@
929   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
930 
931   Collective
932 
933   Input Parameters:
934 + pc       - the preconditioner context
935 . n        - the number of subdomains for this processor (default value = 1)
936 . is       - the index sets that define the subdomains for this processor (or `NULL` for PETSc to determine the subdomains)
937              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
938 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
939              (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array
940              (not the `IS` in the array) after this call
941 
942   Options Database Key:
943 . -pc_asm_local_blocks <blks> - Sets number of local blocks
944 
945   Level: advanced
946 
947   Notes:
948   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`
949 
950   By default the `PCASM` preconditioner uses 1 block per processor.
951 
952   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
953 
954   If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
955   back to form the global solution (this is the standard restricted additive Schwarz method, RASM)
956 
957   If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
958   no code to handle that case.
959 
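  Example Usage:
  A minimal sketch; `n0`/`idx0` and `n1`/`idx1` are hypothetical sizes and global indices of two
  subdomains owned by this process, and `ksp` is assumed to be an existing `KSP`
.vb
  IS is[2];
  PC pc;

  /* n0/idx0 and n1/idx1 are hypothetical index data describing the two local subdomains */
  ISCreateGeneral(PETSC_COMM_SELF, n0, idx0, PETSC_COPY_VALUES, &is[0]);
  ISCreateGeneral(PETSC_COMM_SELF, n1, idx1, PETSC_COPY_VALUES, &is[1]);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCASM);
  PCASMSetLocalSubdomains(pc, 2, is, NULL);
  /* the PC keeps its own references to the index sets, so ours may be dropped */
  ISDestroy(&is[0]);
  ISDestroy(&is[1]);
.ve
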
960 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
961           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
962 @*/
963 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
964 {
965   PetscFunctionBegin;
966   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
967   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
968   PetscFunctionReturn(PETSC_SUCCESS);
969 }
970 
971 /*@
972   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
973   additive Schwarz preconditioner, `PCASM`.
974 
975   Collective, all MPI ranks must pass in the same array of `IS`
976 
977   Input Parameters:
978 + pc       - the preconditioner context
979 . N        - the number of subdomains for all processors
980 . is       - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains)
981              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
982 - is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information)
983              The values of the `is_local` array are copied so you can free the array (not the `IS` in the array) after this call
984 
985   Options Database Key:
986 . -pc_asm_blocks <blks> - Sets total blocks
987 
988   Level: advanced
989 
990   Notes:
991   Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`.
992 
993   By default the `PCASM` preconditioner uses 1 block per processor.
994 
995   These index sets cannot be destroyed until after completion of the
996   linear solves for which the `PCASM` preconditioner is being used.
997 
998   Use `PCASMSetLocalSubdomains()` to set local subdomains.
999 
1000   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`
1001 
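  Example Usage:
  A minimal sketch (assumes `ksp` is an existing `KSP`; the block count 16 is arbitrary)
.vb
  PC pc;

  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCASM);
  PCASMSetTotalSubdomains(pc, 16, NULL, NULL); /* 16 blocks split evenly across all processes */
.ve
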
1002 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1003           `PCASMCreateSubdomains2D()`, `PCGASM`
1004 @*/
1005 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
1006 {
1007   PetscFunctionBegin;
1008   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1009   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
1010   PetscFunctionReturn(PETSC_SUCCESS);
1011 }
1012 
1013 /*@
1014   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1015   additive Schwarz preconditioner, `PCASM`.
1016 
1017   Logically Collective
1018 
1019   Input Parameters:
1020 + pc  - the preconditioner context
1021 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1022 
1023   Options Database Key:
1024 . -pc_asm_overlap <ovl> - Sets overlap
1025 
1026   Level: intermediate
1027 
1028   Notes:
1029   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1030   multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1031   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1032 
1033   The overlap defaults to 1, so if one desires that no additional
1034   overlap be computed beyond what may have been set with a call to
1035   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1036   must be set to 0.  In particular, if one does not explicitly set
1037   the subdomains in an application code, then all overlap would be computed
1038   internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1039   variant that is equivalent to the block Jacobi preconditioner.
1040 
1041   The default algorithm used by PETSc to increase the overlap is fast but not scalable;
1042   use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1043 
1044   One can define initial index sets with any overlap via
1045   `PCASMSetLocalSubdomains()`; the routine
1046   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1047   if desired.
1048 
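  Example Usage:
  A minimal sketch (assumes `pc` has already been set to type `PCASM`)
.vb
  PCASMSetOverlap(pc, 2); /* grow each subdomain by 2 levels of matrix-graph connectivity */
.ve
  or, equivalently, give the option -pc_asm_overlap 2 on the command line.
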
1049 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1050           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1051 @*/
1052 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1053 {
1054   PetscFunctionBegin;
1055   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1056   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1057   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1058   PetscFunctionReturn(PETSC_SUCCESS);
1059 }
1060 
1061 /*@
1062   PCASMSetType - Sets the type of restriction and interpolation used
1063   for local problems in the additive Schwarz method, `PCASM`.
1064 
1065   Logically Collective
1066 
1067   Input Parameters:
1068 + pc   - the preconditioner context
1069 - type - variant of `PCASM`, one of
1070 .vb
1071       PC_ASM_BASIC       - full interpolation and restriction
1072       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1073       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1074       PC_ASM_NONE        - local processor restriction and interpolation
1075 .ve
1076 
1077   Options Database Key:
1078 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1079 
1080   Level: intermediate
1081 
1082   Note:
1083   If the `is_local` arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1084   to limit the local processor interpolation.
1085 
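  Example Usage:
  A minimal sketch (assumes `pc` has already been set to type `PCASM`)
.vb
  PCASMSetType(pc, PC_ASM_BASIC); /* use full restriction and full interpolation */
.ve
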
1086 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1087           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1088 @*/
1089 PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1090 {
1091   PetscFunctionBegin;
1092   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1093   PetscValidLogicalCollectiveEnum(pc, type, 2);
1094   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1095   PetscFunctionReturn(PETSC_SUCCESS);
1096 }
1097 
1098 /*@
1099   PCASMGetType - Gets the type of restriction and interpolation used
1100   for local problems in the additive Schwarz method, `PCASM`.
1101 
1102   Logically Collective
1103 
1104   Input Parameter:
1105 . pc - the preconditioner context
1106 
1107   Output Parameter:
1108 . type - variant of `PCASM`, one of
1109 .vb
1110       PC_ASM_BASIC       - full interpolation and restriction
1111       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1112       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1113       PC_ASM_NONE        - local processor restriction and interpolation
1114 .ve
1115 
1116   Options Database Key:
1117 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1118 
1119   Level: intermediate
1120 
1121 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1122           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1123 @*/
1124 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1125 {
1126   PetscFunctionBegin;
1127   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1128   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1129   PetscFunctionReturn(PETSC_SUCCESS);
1130 }
1131 
1132 /*@
1133   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1134 
1135   Logically Collective
1136 
1137   Input Parameters:
1138 + pc   - the preconditioner context
1139 - type - type of composition, one of
1140 .vb
1141   PC_COMPOSITE_ADDITIVE       - local additive combination
1142   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1143 .ve
1144 
1145   Options Database Key:
1146 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1147 
1148   Level: intermediate
1149 
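  Example Usage:
  A minimal sketch (assumes `pc` has already been set to type `PCASM`)
.vb
  PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE); /* sweep multiplicatively through the local blocks */
.ve
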
1150 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1151 @*/
1152 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1153 {
1154   PetscFunctionBegin;
1155   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1156   PetscValidLogicalCollectiveEnum(pc, type, 2);
1157   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1158   PetscFunctionReturn(PETSC_SUCCESS);
1159 }
1160 
1161 /*@
1162   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1163 
1164   Logically Collective
1165 
1166   Input Parameter:
1167 . pc - the preconditioner context
1168 
1169   Output Parameter:
1170 . type - type of composition, one of
1171 .vb
1172   PC_COMPOSITE_ADDITIVE       - local additive combination
1173   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1174 .ve
1175 
1176   Options Database Key:
1177 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1178 
1179   Level: intermediate
1180 
1181 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMType`, `PCCompositeType`
1182 @*/
1183 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1184 {
1185   PetscFunctionBegin;
1186   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1187   PetscAssertPointer(type, 2);
1188   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1189   PetscFunctionReturn(PETSC_SUCCESS);
1190 }
1191 
1192 /*@
1193   PCASMSetSortIndices - Sets whether the subdomain indices are sorted.
1194 
1195   Logically Collective
1196 
1197   Input Parameters:
1198 + pc     - the preconditioner context
1199 - doSort - sort the subdomain indices
1200 
1201   Level: intermediate
1202 
1203 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1204           `PCASMCreateSubdomains2D()`
1205 @*/
1206 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1207 {
1208   PetscFunctionBegin;
1209   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1210   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1211   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1212   PetscFunctionReturn(PETSC_SUCCESS);
1213 }
1214 
1215 /*@C
1216   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1217   this processor.
1218 
1219   Collective iff first_local is requested
1220 
1221   Input Parameter:
1222 . pc - the preconditioner context
1223 
1224   Output Parameters:
1225 + n_local     - the number of blocks on this processor, or `NULL` if not needed
1226 . first_local - the global number of the first block on this processor, or `NULL` if not needed; all processors must request it or all must pass `NULL`
1227 - ksp         - the array of `KSP` contexts
1228 
1229   Level: advanced
1230 
1231   Notes:
1232   The array of `KSP` contexts returned by `PCASMGetSubKSP()` must not be freed by the caller.
1233 
1234   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
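
  A minimal usage sketch (assumes `ksp` uses a `PCASM` preconditioner `pc` that has already been configured):
.vb
  KSP     *subksp;
  PC       subpc;
  PetscInt i, nlocal;

  PetscCall(KSPSetUp(ksp));
  PetscCall(PCASMGetSubKSP(pc, &nlocal, NULL, &subksp));
  for (i = 0; i < nlocal; i++) {
    PetscCall(KSPGetPC(subksp[i], &subpc));
    PetscCall(PCSetType(subpc, PCLU)); // for example, a direct solve on each block
  }
.ve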
1235 
1236   Fortran Note:
1237   Call `PCASMRestoreSubKSP()` when access to the array of `KSP` is no longer needed.
1238 
1239 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1240           `PCASMCreateSubdomains2D()`
1241 @*/
1242 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1243 {
1244   PetscFunctionBegin;
1245   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1246   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1247   PetscFunctionReturn(PETSC_SUCCESS);
1248 }
1249 
1250 /*MC
1251    PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1252            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`
1253 
1254    Options Database Keys:
1255 +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1256 .  -pc_asm_overlap <ovl>                          - Sets the amount of overlap between subdomains
1257 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1258 .  -pc_asm_dm_subdomains <bool>                   - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1259 -  -pc_asm_local_type [additive,multiplicative]   - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1260 
1261    Level: beginner
1262 
1263    Notes:
1264    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors, you
1265    will get different convergence rates due to the default option of `-pc_asm_type restrict`. Use
1266    `-pc_asm_type basic` to get the same convergence behavior in both cases.
1267 
1268    Each processor can have one or more blocks, but a block cannot be shared by more
1269    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1270 
1271    To set options on the solvers for each block, add the prefix `sub_` to the `KSP` and `PC`
1272    options database keys. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`
1273 
1274    To set the options on the solvers separately for each block, call `PCASMGetSubKSP()`
1275    and set the options directly on the resulting `KSP` objects (you can access the `PC` of each with `KSPGetPC()`)
1276 
1277    If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains
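
   A minimal setup sketch in C (assumes `A` is the assembled operator and `b`, `x` are compatible vectors):
.vb
   KSP ksp;
   PC  pc;

   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
   PetscCall(KSPSetOperators(ksp, A, A));
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCASM));
   PetscCall(PCASMSetOverlap(pc, 2));
   PetscCall(KSPSetFromOptions(ksp)); // picks up -pc_asm_* and -sub_* options from the options database
   PetscCall(KSPSolve(ksp, b, x));
.ve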
1278 
1279 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1280           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1281           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`
1282 M*/
1283 
1284 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1285 {
1286   PC_ASM *osm;
1287 
1288   PetscFunctionBegin;
1289   PetscCall(PetscNew(&osm));
1290 
1291   osm->n             = PETSC_DECIDE;
1292   osm->n_local       = 0;
1293   osm->n_local_true  = PETSC_DECIDE;
1294   osm->overlap       = 1;
1295   osm->ksp           = NULL;
1296   osm->restriction   = NULL;
1297   osm->lprolongation = NULL;
1298   osm->lrestriction  = NULL;
1299   osm->x             = NULL;
1300   osm->y             = NULL;
1301   osm->is            = NULL;
1302   osm->is_local      = NULL;
1303   osm->mat           = NULL;
1304   osm->pmat          = NULL;
1305   osm->type          = PC_ASM_RESTRICT;
1306   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1307   osm->sort_indices  = PETSC_TRUE;
1308   osm->dm_subdomains = PETSC_FALSE;
1309   osm->sub_mat_type  = NULL;
1310 
1311   pc->data                   = (void *)osm;
1312   pc->ops->apply             = PCApply_ASM;
1313   pc->ops->matapply          = PCMatApply_ASM;
1314   pc->ops->applytranspose    = PCApplyTranspose_ASM;
1315   pc->ops->matapplytranspose = PCMatApplyTranspose_ASM;
1316   pc->ops->setup             = PCSetUp_ASM;
1317   pc->ops->reset             = PCReset_ASM;
1318   pc->ops->destroy           = PCDestroy_ASM;
1319   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
1320   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
1321   pc->ops->view              = PCView_ASM;
1322   pc->ops->applyrichardson   = NULL;
1323 
1324   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1325   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1326   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1327   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1328   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1329   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1330   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1331   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1332   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1333   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1334   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1335   PetscFunctionReturn(PETSC_SUCCESS);
1336 }
1337 
1338 /*@C
1339   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1340   preconditioner, `PCASM`, for any problem on a general grid.
1341 
1342   Collective
1343 
1344   Input Parameters:
1345 + A - The global matrix operator
1346 - n - the number of local blocks
1347 
1348   Output Parameter:
1349 . outis - the array of index sets defining the subdomains
1350 
1351   Level: advanced
1352 
1353   Note:
1354   This generates nonoverlapping subdomains; `PCASM` will generate the overlap
1355   from these if you use `PCASMSetLocalSubdomains()`.
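
  Example usage (a sketch; assumes `A` is the matrix the `PCASM` preconditioner `pc` will be built from):
.vb
  IS      *is;
  PetscInt nsub = 4;

  PetscCall(PCASMCreateSubdomains(A, nsub, &is));
  PetscCall(PCASMSetLocalSubdomains(pc, nsub, is, NULL));
  PetscCall(PCASMDestroySubdomains(nsub, &is, NULL)); // the PC keeps its own references to the index sets
.ve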
1356 
1357 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1358 @*/
1359 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1360 {
1361   MatPartitioning mpart;
1362   const char     *prefix;
1363   PetscInt        i, j, rstart, rend, bs;
1364   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1365   Mat             Ad = NULL, adj;
1366   IS              ispart, isnumb, *is;
1367 
1368   PetscFunctionBegin;
1369   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1370   PetscAssertPointer(outis, 3);
1371   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);
1372 
1373   /* Get prefix, row distribution, and block size */
1374   PetscCall(MatGetOptionsPrefix(A, &prefix));
1375   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1376   PetscCall(MatGetBlockSize(A, &bs));
1377   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1378 
1379   /* Get diagonal block from matrix if possible */
1380   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1381   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1382   if (Ad) {
1383     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1384     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1385   }
1386   if (Ad && n > 1) {
1387     PetscBool match, done;
1388     /* Try to setup a good matrix partitioning if available */
1389     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1390     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1391     PetscCall(MatPartitioningSetFromOptions(mpart));
1392     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1393     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1394     if (!match) { /* assume a "good" partitioner is available */
1395       PetscInt        na;
1396       const PetscInt *ia, *ja;
1397       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1398       if (done) {
1399         /* Build adjacency matrix by hand. Unfortunately a call to
1400            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1401            remove the block-aij structure and we cannot expect
1402            MatPartitioning to split vertices as we need */
1403         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1404         const PetscInt *row;
1405         nnz = 0;
1406         for (i = 0; i < na; i++) { /* count number of nonzeros */
1407           len = ia[i + 1] - ia[i];
1408           row = ja + ia[i];
1409           for (j = 0; j < len; j++) {
1410             if (row[j] == i) { /* don't count diagonal */
1411               len--;
1412               break;
1413             }
1414           }
1415           nnz += len;
1416         }
1417         PetscCall(PetscMalloc1(na + 1, &iia));
1418         PetscCall(PetscMalloc1(nnz, &jja));
1419         nnz    = 0;
1420         iia[0] = 0;
1421         for (i = 0; i < na; i++) { /* fill adjacency */
1422           cnt = 0;
1423           len = ia[i + 1] - ia[i];
1424           row = ja + ia[i];
1425           for (j = 0; j < len; j++) {
1426             if (row[j] != i) { /* if not diagonal */
1427               jja[nnz + cnt++] = row[j];
1428             }
1429           }
1430           nnz += cnt;
1431           iia[i + 1] = nnz;
1432         }
1433         /* Partitioning of the adjacency matrix */
1434         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1435         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1436         PetscCall(MatPartitioningSetNParts(mpart, n));
1437         PetscCall(MatPartitioningApply(mpart, &ispart));
1438         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1439         PetscCall(MatDestroy(&adj));
1440         foundpart = PETSC_TRUE;
1441       }
1442       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1443     }
1444     PetscCall(MatPartitioningDestroy(&mpart));
1445   }
1446 
1447   PetscCall(PetscMalloc1(n, &is));
1448   *outis = is;
1449 
1450   if (!foundpart) {
1451     /* Partitioning by contiguous chunks of rows */
1452 
1453     PetscInt mbs   = (rend - rstart) / bs;
1454     PetscInt start = rstart;
1455     for (i = 0; i < n; i++) {
1456       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1457       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1458       start += count;
1459     }
1460 
1461   } else {
1462     /* Partitioning by adjacency of diagonal block  */
1463 
1464     const PetscInt *numbering;
1465     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1466     /* Get node count in each partition */
1467     PetscCall(PetscMalloc1(n, &count));
1468     PetscCall(ISPartitioningCount(ispart, n, count));
1469     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1470       for (i = 0; i < n; i++) count[i] *= bs;
1471     }
1472     /* Build indices from node numbering */
1473     PetscCall(ISGetLocalSize(isnumb, &nidx));
1474     PetscCall(PetscMalloc1(nidx, &indices));
1475     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1476     PetscCall(ISGetIndices(isnumb, &numbering));
1477     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1478     PetscCall(ISRestoreIndices(isnumb, &numbering));
1479     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1480       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1481       for (i = 0; i < nidx; i++) {
1482         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1483       }
1484       PetscCall(PetscFree(indices));
1485       nidx *= bs;
1486       indices = newidx;
1487     }
1488     /* Shift to get global indices */
1489     for (i = 0; i < nidx; i++) indices[i] += rstart;
1490 
1491     /* Build the index sets for each block */
1492     for (i = 0; i < n; i++) {
1493       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1494       PetscCall(ISSort(is[i]));
1495       start += count[i];
1496     }
1497 
1498     PetscCall(PetscFree(count));
1499     PetscCall(PetscFree(indices));
1500     PetscCall(ISDestroy(&isnumb));
1501     PetscCall(ISDestroy(&ispart));
1502   }
1503   PetscFunctionReturn(PETSC_SUCCESS);
1504 }
1505 
1506 /*@C
1507   PCASMDestroySubdomains - Destroys the index sets created with
1508   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1509 
1510   Collective
1511 
1512   Input Parameters:
1513 + n        - the number of index sets
1514 . is       - the array of index sets
1515 - is_local - the array of local index sets, can be `NULL`
1516 
1517   Level: advanced
1518 
1519   Developer Note:
1520   The `IS` arguments should be of type `IS *[]`
1521 
1522 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1523 @*/
1524 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS *is[], IS *is_local[])
1525 {
1526   PetscInt i;
1527 
1528   PetscFunctionBegin;
1529   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1530   if (*is) {
1531     PetscAssertPointer(*is, 2);
1532     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is)[i]));
1533     PetscCall(PetscFree(*is));
1534   }
1535   if (is_local && *is_local) {
1536     PetscAssertPointer(*is_local, 3);
1537     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is_local)[i]));
1538     PetscCall(PetscFree(*is_local));
1539   }
1540   PetscFunctionReturn(PETSC_SUCCESS);
1541 }
1542 
1543 /*@C
1544   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1545   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1546 
1547   Not Collective
1548 
1549   Input Parameters:
1550 + m       - the number of mesh points in the x direction
1551 . n       - the number of mesh points in the y direction
1552 . M       - the number of subdomains in the x direction
1553 . N       - the number of subdomains in the y direction
1554 . dof     - degrees of freedom per node
1555 - overlap - overlap in mesh lines
1556 
1557   Output Parameters:
1558 + Nsub     - the number of subdomains created
1559 . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1560 - is_local - array of index sets defining non-overlapping subdomains
1561 
1562   Level: advanced
1563 
1564   Note:
1565   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1566   preconditioners.  More general related routines are
1567   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
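
  Example usage (a sketch; assumes a sequential run with a `PCASM` preconditioner `pc`):
.vb
  IS      *is, *is_local;
  PetscInt Nsub;

  // 128 x 128 mesh, 4 x 4 subdomains, 1 dof per node, overlap of 1 mesh line
  PetscCall(PCASMCreateSubdomains2D(128, 128, 4, 4, 1, 1, &Nsub, &is, &is_local));
  PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
.ve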
1568 
1569 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1570           `PCASMSetOverlap()`
1571 @*/
1572 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS *is[], IS *is_local[])
1573 {
1574   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1575   PetscInt nidx, *idx, loc, ii, jj, count;
1576 
1577   PetscFunctionBegin;
1578   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");
1579 
1580   *Nsub = N * M;
1581   PetscCall(PetscMalloc1(*Nsub, is));
1582   PetscCall(PetscMalloc1(*Nsub, is_local));
1583   ystart    = 0;
1584   loc_outer = 0;
1585   for (i = 0; i < N; i++) {
1586     height = n / N + ((n % N) > i); /* height of subdomain */
1587     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1588     yleft = ystart - overlap;
1589     if (yleft < 0) yleft = 0;
1590     yright = ystart + height + overlap;
1591     if (yright > n) yright = n;
1592     xstart = 0;
1593     for (j = 0; j < M; j++) {
1594       width = m / M + ((m % M) > j); /* width of subdomain */
1595       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1596       xleft = xstart - overlap;
1597       if (xleft < 0) xleft = 0;
1598       xright = xstart + width + overlap;
1599       if (xright > m) xright = m;
1600       nidx = (xright - xleft) * (yright - yleft);
1601       PetscCall(PetscMalloc1(nidx, &idx));
1602       loc = 0;
1603       for (ii = yleft; ii < yright; ii++) {
1604         count = m * ii + xleft;
1605         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1606       }
1607       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1608       if (overlap == 0) {
1609         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));
1610 
1611         (*is_local)[loc_outer] = (*is)[loc_outer];
1612       } else {
1613         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1614           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1615         }
1616         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1617       }
1618       PetscCall(PetscFree(idx));
1619       xstart += width;
1620       loc_outer++;
1621     }
1622     ystart += height;
1623   }
1624   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1625   PetscFunctionReturn(PETSC_SUCCESS);
1626 }
1627 
1628 /*@C
1629   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1630   only) for the additive Schwarz preconditioner, `PCASM`.
1631 
1632   Not Collective
1633 
1634   Input Parameter:
1635 . pc - the preconditioner context
1636 
1637   Output Parameters:
1638 + n        - if requested, the number of subdomains for this processor (default value = 1)
1639 . is       - if requested, the index sets that define the subdomains for this processor
1640 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1641 
1642   Level: advanced
1643 
1644   Note:
1645   The `IS` numbering is in the parallel, global numbering of the vector.
1646 
1647 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1648           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1649 @*/
1650 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1651 {
1652   PC_ASM   *osm = (PC_ASM *)pc->data;
1653   PetscBool match;
1654 
1655   PetscFunctionBegin;
1656   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1657   if (n) PetscAssertPointer(n, 2);
1658   if (is) PetscAssertPointer(is, 3);
1659   if (is_local) PetscAssertPointer(is_local, 4);
1660   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1661   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1662   if (n) *n = osm->n_local_true;
1663   if (is) *is = osm->is;
1664   if (is_local) *is_local = osm->is_local;
1665   PetscFunctionReturn(PETSC_SUCCESS);
1666 }
1667 
1668 /*@C
1669   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1670   only) for the additive Schwarz preconditioner, `PCASM`.
1671 
1672   Not Collective
1673 
1674   Input Parameter:
1675 . pc - the preconditioner context
1676 
1677   Output Parameters:
1678 + n   - if requested, the number of matrices for this processor (default value = 1)
1679 - mat - if requested, the matrices
1680 
1681   Level: advanced
1682 
1683   Notes:
1684   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`.
1685 
1686   Usually one would use `PCSetModifySubMatrices()` to change the submatrices when building the preconditioner.
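
  Example usage (a sketch; assumes `ksp` uses a `PCASM` preconditioner `pc`):
.vb
  Mat     *mats;
  PetscInt i, nmat;

  PetscCall(KSPSetUp(ksp));
  PetscCall(PCASMGetLocalSubmatrices(pc, &nmat, &mats));
  for (i = 0; i < nmat; i++) PetscCall(MatView(mats[i], PETSC_VIEWER_STDOUT_SELF));
.ve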
1687 
1688 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1689           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1690 @*/
1691 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1692 {
1693   PC_ASM   *osm;
1694   PetscBool match;
1695 
1696   PetscFunctionBegin;
1697   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1698   if (n) PetscAssertPointer(n, 2);
1699   if (mat) PetscAssertPointer(mat, 3);
1700   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1701   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1702   if (!match) {
1703     if (n) *n = 0;
1704     if (mat) *mat = NULL;
1705   } else {
1706     osm = (PC_ASM *)pc->data;
1707     if (n) *n = osm->n_local_true;
1708     if (mat) *mat = osm->pmat;
1709   }
1710   PetscFunctionReturn(PETSC_SUCCESS);
1711 }
1712 
1713 /*@
1714   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1715 
1716   Logically Collective
1717 
1718   Input Parameters:
1719 + pc  - the preconditioner
1720 - flg - boolean indicating whether to use subdomains defined by the `DM`
1721 
1722   Options Database Key:
1723 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1724 
1725   Level: intermediate
1726 
1727   Note:
1728   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1729   so setting either of the first two effectively turns the latter off.
1730 
1731   Developer Note:
1732   This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key
1733 
1734 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1735           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1736 @*/
1737 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1738 {
1739   PC_ASM   *osm = (PC_ASM *)pc->data;
1740   PetscBool match;
1741 
1742   PetscFunctionBegin;
1743   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1744   PetscValidLogicalCollectiveBool(pc, flg, 2);
1745   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1746   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1747   if (match) osm->dm_subdomains = flg;
1748   PetscFunctionReturn(PETSC_SUCCESS);
1749 }
1750 
1751 /*@
1752   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1753 
1754   Not Collective
1755 
1756   Input Parameter:
1757 . pc - the preconditioner
1758 
1759   Output Parameter:
1760 . flg - boolean indicating whether to use subdomains defined by the `DM`
1761 
1762   Level: intermediate
1763 
1764   Developer Note:
1765   This should be `PCASMGetUseDMSubdomains()`
1766 
1767 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1768           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1769 @*/
1770 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1771 {
1772   PC_ASM   *osm = (PC_ASM *)pc->data;
1773   PetscBool match;
1774 
1775   PetscFunctionBegin;
1776   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1777   PetscAssertPointer(flg, 2);
1778   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1779   if (match) *flg = osm->dm_subdomains;
1780   else *flg = PETSC_FALSE;
1781   PetscFunctionReturn(PETSC_SUCCESS);
1782 }
1783 
1784 /*@
1785   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1786 
1787   Not Collective
1788 
1789   Input Parameter:
1790 . pc - the `PC`
1791 
1792   Output Parameter:
1793 . sub_mat_type - name of matrix type
1794 
1795   Level: advanced
1796 
1797 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1798 @*/
1799 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1800 {
1801   PetscFunctionBegin;
1802   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1803   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1804   PetscFunctionReturn(PETSC_SUCCESS);
1805 }
1806 
1807 /*@
1808   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1809 
1810   Collective
1811 
1812   Input Parameters:
1813 + pc           - the `PC` object
1814 - sub_mat_type - the `MatType`
1815 
1816   Options Database Key:
1817 . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1818    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1819 
1820   Note:
1821   See `MatType` for available types
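
  Example usage (a sketch; `MATSEQAIJVIENNACL` is only usable when PETSc is configured with ViennaCL support):
.vb
  PetscCall(PCASMSetSubMatType(pc, MATSEQAIJVIENNACL));
.ve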
1822 
1823   Level: advanced
1824 
1825 .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1826 @*/
1827 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1828 {
1829   PetscFunctionBegin;
1830   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1831   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1832   PetscFunctionReturn(PETSC_SUCCESS);
1833 }
1834