1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. However, in order to
5   deal easily with the VecScatter(), we treat each processor as if it had the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include <petsc/private/matimpl.h>
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         iascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (iascii) {
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(sviewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(sviewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
69     }
70   } else if (isstring) {
71     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
72     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
73     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
74     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
75   }
76   PetscFunctionReturn(PETSC_SUCCESS);
77 }
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 chars for the header. */
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; no more than 15 characters per index plus 512 chars for the header. */
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
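/*
   Builds everything needed to apply the additive Schwarz preconditioner: determines the subdomain
   index sets (user-supplied, generated from the DM, or one block per process), optionally increases
   their overlap with MatIncreaseOverlap(), extracts the corresponding submatrices of pc->pmat, and
   creates one sequential KSP (options prefix "sub_") plus the work vectors and scatters for each
   local block.
*/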
153 static PetscErrorCode PCSetUp_ASM(PC pc)
154 {
155   PC_ASM       *osm = (PC_ASM *)pc->data;
156   PetscBool     flg;
157   PetscInt      i, m, m_local;
158   MatReuse      scall = MAT_REUSE_MATRIX;
159   IS            isl;
160   KSP           ksp;
161   PC            subpc;
162   const char   *prefix, *pprefix;
163   Vec           vec;
164   DM           *domain_dm = NULL;
165   MatNullSpace *nullsp    = NULL;
166 
167   PetscFunctionBegin;
168   if (!pc->setupcalled) {
169     PetscInt m;
170 
171     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172     if (osm->n_local_true == PETSC_DECIDE) {
173       /* no subdomains given */
174       /* try pc->dm first, if allowed */
175       if (osm->dm_subdomains && pc->dm) {
176         PetscInt num_domains, d;
177         char   **domain_names;
178         IS      *inner_domain_is, *outer_domain_is;
179         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181                               A future improvement of this code might allow one to use
182                               DM-defined subdomains and also increase the overlap,
183                               but that is not currently supported */
184         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185         for (d = 0; d < num_domains; ++d) {
186           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189         }
190         PetscCall(PetscFree(domain_names));
191         PetscCall(PetscFree(inner_domain_is));
192         PetscCall(PetscFree(outer_domain_is));
193       }
194       if (osm->n_local_true == PETSC_DECIDE) {
195         /* still no subdomains; use one subdomain per processor */
196         osm->n_local_true = 1;
197       }
198     }
199     { /* determine the global and max number of subdomains */
200       struct {
201         PetscInt max, sum;
202       } inwork, outwork;
203       PetscMPIInt size;
204 
205       inwork.max = osm->n_local_true;
206       inwork.sum = osm->n_local_true;
207       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208       osm->n_local = outwork.max;
209       osm->n       = outwork.sum;
210 
211       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212       if (outwork.max == 1 && outwork.sum == size) {
213         /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
214         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215       }
216     }
217     if (!osm->is) { /* create the index sets */
218       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219     }
220     if (osm->n_local_true > 1 && !osm->is_local) {
221       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222       for (i = 0; i < osm->n_local_true; i++) {
223         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226         } else {
227           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228           osm->is_local[i] = osm->is[i];
229         }
230       }
231     }
232     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233     if (osm->overlap > 0) {
234       /* Extend the "overlapping" regions by a number of steps */
235       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236     }
237     if (osm->sort_indices) {
238       for (i = 0; i < osm->n_local_true; i++) {
239         PetscCall(ISSort(osm->is[i]));
240         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241       }
242     }
243     flg = PETSC_FALSE;
244     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246     if (!osm->ksp) {
247       /* Create the local solvers */
248       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250       for (i = 0; i < osm->n_local_true; i++) {
251         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
253         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
254         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
255         PetscCall(KSPSetType(ksp, KSPPREONLY));
256         PetscCall(KSPGetPC(ksp, &subpc));
257         PetscCall(PCGetOptionsPrefix(pc, &prefix));
258         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
259         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
260         if (domain_dm) {
261           PetscCall(KSPSetDM(ksp, domain_dm[i]));
262           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
263           PetscCall(DMDestroy(&domain_dm[i]));
264         }
265         osm->ksp[i] = ksp;
266       }
267       if (domain_dm) PetscCall(PetscFree(domain_dm));
268     }
269 
270     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
271     PetscCall(ISSortRemoveDups(osm->lis));
272     PetscCall(ISGetLocalSize(osm->lis, &m));
273 
274     scall = MAT_INITIAL_MATRIX;
275   } else {
276     /*
277        Destroy the blocks from the previous iteration
278     */
279     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
280       PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
281       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
282       scall = MAT_INITIAL_MATRIX;
283     }
284   }
285 
286   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
287   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
288     PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
289     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
290     scall = MAT_INITIAL_MATRIX;
291   }
292 
293   /*
294      Extract out the submatrices
295   */
296   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
297   if (scall == MAT_INITIAL_MATRIX) {
298     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
299     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
300     if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
301   }
302 
303   /* Convert the types of the submatrices (if need be) */
304   if (osm->sub_mat_type) {
305     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
306   }
307 
308   if (!pc->setupcalled) {
309     VecType vtype;
310 
311     /* Create the local work vectors (from the local matrices) and scatter contexts */
312     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
313 
314     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
315     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
316     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
317     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
318     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
319 
320     PetscCall(ISGetLocalSize(osm->lis, &m));
321     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
322     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
323     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
324     PetscCall(VecSetSizes(osm->lx, m, m));
325     PetscCall(VecSetType(osm->lx, vtype));
326     PetscCall(VecDuplicate(osm->lx, &osm->ly));
327     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
328     PetscCall(ISDestroy(&isl));
329 
330     for (i = 0; i < osm->n_local_true; ++i) {
331       ISLocalToGlobalMapping ltog;
332       IS                     isll;
333       const PetscInt        *idx_is;
334       PetscInt              *idx_lis, nout;
335 
336       PetscCall(ISGetLocalSize(osm->is[i], &m));
337       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
338       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
339 
340       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
341       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
342       PetscCall(ISGetLocalSize(osm->is[i], &m));
343       PetscCall(ISGetIndices(osm->is[i], &idx_is));
344       PetscCall(PetscMalloc1(m, &idx_lis));
345       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
346       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
347       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
348       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
349       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
350       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
351       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
352       PetscCall(ISDestroy(&isll));
353       PetscCall(ISDestroy(&isl));
354       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
355         ISLocalToGlobalMapping ltog;
356         IS                     isll, isll_local;
357         const PetscInt        *idx_local;
358         PetscInt              *idx1, *idx2, nout;
359 
360         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
361         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
362 
363         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
364         PetscCall(PetscMalloc1(m_local, &idx1));
365         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
366         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
367         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
368         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
369 
370         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
371         PetscCall(PetscMalloc1(m_local, &idx2));
372         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
373         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
374         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
375         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
376 
377         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
378         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
379 
380         PetscCall(ISDestroy(&isll));
381         PetscCall(ISDestroy(&isll_local));
382       }
383     }
384     PetscCall(VecDestroy(&vec));
385   }
386 
387   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
388     IS      *cis;
389     PetscInt c;
390 
391     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
392     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
393     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
394     PetscCall(PetscFree(cis));
395   }
396 
397   /* Return control to the user so that the submatrices can be modified (e.g., to apply
398      different boundary conditions for the submatrices than for the global problem) */
399   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
400 
401   /*
402      Loop over subdomains putting them into local ksp
403   */
404   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
405   for (i = 0; i < osm->n_local_true; i++) {
406     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
407     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
408     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
409   }
410   PetscFunctionReturn(PETSC_SUCCESS);
411 }
412 
413 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
414 {
415   PC_ASM            *osm = (PC_ASM *)pc->data;
416   PetscInt           i;
417   KSPConvergedReason reason;
418 
419   PetscFunctionBegin;
420   for (i = 0; i < osm->n_local_true; i++) {
421     PetscCall(KSPSetUp(osm->ksp[i]));
422     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
423     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
424   }
425   PetscFunctionReturn(PETSC_SUCCESS);
426 }
427 
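/*
   Applies the preconditioner: scatters the global right-hand side into the local overlapping blocks,
   solves each block with its KSP, and adds the (possibly restricted) local solutions back into the
   global vector. With PC_COMPOSITE_MULTIPLICATIVE, the right-hand side of each subsequent block is
   first updated with the local solution computed so far.
*/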
428 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
429 {
430   PC_ASM     *osm = (PC_ASM *)pc->data;
431   PetscInt    i, n_local_true = osm->n_local_true;
432   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
433 
434   PetscFunctionBegin;
435   /*
436      support for limiting the restriction or interpolation to only local
437      subdomain values (leaving the other values 0).
438   */
439   if (!(osm->type & PC_ASM_RESTRICT)) {
440     forward = SCATTER_FORWARD_LOCAL;
441     /* have to zero the work RHS since scatter may leave some slots empty */
442     PetscCall(VecSet(osm->lx, 0.0));
443   }
444   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
445 
446   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
447   /* zero the global and the local solutions */
448   PetscCall(VecSet(y, 0.0));
449   PetscCall(VecSet(osm->ly, 0.0));
450 
451   /* copy the global RHS to local RHS including the ghost nodes */
452   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
453   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
454 
455   /* restrict local RHS to the overlapping 0-block RHS */
456   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
457   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
458 
459   /* do the local solves */
460   for (i = 0; i < n_local_true; ++i) {
461     /* solve the overlapping i-block */
462     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
463     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
464     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
465     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
466 
467     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restricted additive) */
468       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
469       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
470     } else { /* interpolate the overlapping i-block solution to the local solution */
471       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
472       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
473     }
474 
475     if (i < n_local_true - 1) {
476       /* restrict local RHS to the overlapping (i+1)-block RHS */
477       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
478       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
479 
480       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
481         /* update the overlapping (i+1)-block RHS using the current local solution */
482         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
483         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
484       }
485     }
486   }
487   /* add the local solution to the global solution including the ghost nodes */
488   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
489   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
490   PetscFunctionReturn(PETSC_SUCCESS);
491 }
492 
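/*
   Variant of PCApply_ASM() that acts on all columns of the dense matrix X at once using
   KSPMatSolve(); currently only supported when each process has at most one local block.
*/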
493 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
494 {
495   PC_ASM     *osm = (PC_ASM *)pc->data;
496   Mat         Z, W;
497   Vec         x;
498   PetscInt    i, m, N;
499   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
500 
501   PetscFunctionBegin;
502   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
503   /*
504      support for limiting the restriction or interpolation to only local
505      subdomain values (leaving the other values 0).
506   */
507   if (!(osm->type & PC_ASM_RESTRICT)) {
508     forward = SCATTER_FORWARD_LOCAL;
509     /* have to zero the work RHS since scatter may leave some slots empty */
510     PetscCall(VecSet(osm->lx, 0.0));
511   }
512   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
513   PetscCall(VecGetLocalSize(osm->x[0], &m));
514   PetscCall(MatGetSize(X, NULL, &N));
515   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
516 
517   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
518   /* zero the global and the local solutions */
519   PetscCall(MatZeroEntries(Y));
520   PetscCall(VecSet(osm->ly, 0.0));
521 
522   for (i = 0; i < N; ++i) {
523     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
524     /* copy the global RHS to local RHS including the ghost nodes */
525     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
526     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
527     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
528 
529     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
530     /* restrict local RHS to the overlapping 0-block RHS */
531     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
532     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
533     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
534   }
535   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
536   /* solve the overlapping 0-block */
537   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
538   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
539   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
540   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
541   PetscCall(MatDestroy(&Z));
542 
543   for (i = 0; i < N; ++i) {
544     PetscCall(VecSet(osm->ly, 0.0));
545     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
546     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restricted additive) */
547       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
548       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
549     } else { /* interpolate the overlapping 0-block solution to the local solution */
550       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
551       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
552     }
553     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
554 
555     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
556     /* add the local solution to the global solution including the ghost nodes */
557     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
558     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
559     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
560   }
561   PetscCall(MatDestroy(&W));
562   PetscFunctionReturn(PETSC_SUCCESS);
563 }
564 
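/*
   Applies the transpose of the preconditioner; the restriction/interpolation limits are swapped
   relative to PCApply_ASM() because the transposes of the three factors are applied.
*/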
565 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
566 {
567   PC_ASM     *osm = (PC_ASM *)pc->data;
568   PetscInt    i, n_local_true = osm->n_local_true;
569   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
570 
571   PetscFunctionBegin;
572   /*
573      Support for limiting the restriction or interpolation to only local
574      subdomain values (leaving the other values 0).
575 
576      Note: these are reversed from PCApply_ASM() because we are applying the
577      transpose of the three terms
578   */
579 
580   if (!(osm->type & PC_ASM_INTERPOLATE)) {
581     forward = SCATTER_FORWARD_LOCAL;
582     /* have to zero the work RHS since scatter may leave some slots empty */
583     PetscCall(VecSet(osm->lx, 0.0));
584   }
585   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
586 
587   /* zero the global and the local solutions */
588   PetscCall(VecSet(y, 0.0));
589   PetscCall(VecSet(osm->ly, 0.0));
590 
591   /* Copy the global RHS to local RHS including the ghost nodes */
592   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
593   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
594 
595   /* Restrict local RHS to the overlapping 0-block RHS */
596   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
597   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
598 
599   /* do the local solves */
600   for (i = 0; i < n_local_true; ++i) {
601     /* solve the overlapping i-block */
602     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
603     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
604     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
605     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
606 
607     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
608       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
609       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
610     } else { /* interpolate the overlapping i-block solution to the local solution */
611       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
612       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
613     }
614 
615     if (i < n_local_true - 1) {
616       /* Restrict local RHS to the overlapping (i+1)-block RHS */
617       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
618       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
619     }
620   }
621   /* Add the local solution to the global solution including the ghost nodes */
622   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
623   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
624   PetscFunctionReturn(PETSC_SUCCESS);
625 }
626 
627 static PetscErrorCode PCReset_ASM(PC pc)
628 {
629   PC_ASM  *osm = (PC_ASM *)pc->data;
630   PetscInt i;
631 
632   PetscFunctionBegin;
633   if (osm->ksp) {
634     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
635   }
636   if (osm->pmat) {
637     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
638   }
639   if (osm->lrestriction) {
640     PetscCall(VecScatterDestroy(&osm->restriction));
641     for (i = 0; i < osm->n_local_true; i++) {
642       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
643       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
644       PetscCall(VecDestroy(&osm->x[i]));
645       PetscCall(VecDestroy(&osm->y[i]));
646     }
647     PetscCall(PetscFree(osm->lrestriction));
648     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
649     PetscCall(PetscFree(osm->x));
650     PetscCall(PetscFree(osm->y));
651   }
652   PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
653   PetscCall(ISDestroy(&osm->lis));
654   PetscCall(VecDestroy(&osm->lx));
655   PetscCall(VecDestroy(&osm->ly));
656   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
657 
658   PetscCall(PetscFree(osm->sub_mat_type));
659 
660   osm->is       = NULL;
661   osm->is_local = NULL;
662   PetscFunctionReturn(PETSC_SUCCESS);
663 }
664 
665 static PetscErrorCode PCDestroy_ASM(PC pc)
666 {
667   PC_ASM  *osm = (PC_ASM *)pc->data;
668   PetscInt i;
669 
670   PetscFunctionBegin;
671   PetscCall(PCReset_ASM(pc));
672   if (osm->ksp) {
673     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
674     PetscCall(PetscFree(osm->ksp));
675   }
676   PetscCall(PetscFree(pc->data));
677 
678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
686   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
687   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
688   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
689   PetscFunctionReturn(PETSC_SUCCESS);
690 }
691 
692 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
693 {
694   PC_ASM         *osm = (PC_ASM *)pc->data;
695   PetscInt        blocks, ovl;
696   PetscBool       flg;
697   PCASMType       asmtype;
698   PCCompositeType loctype;
699   char            sub_mat_type[256];
700 
701   PetscFunctionBegin;
702   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
703   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
704   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
705   if (flg) {
706     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
707     osm->dm_subdomains = PETSC_FALSE;
708   }
709   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
710   if (flg) {
711     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
712     osm->dm_subdomains = PETSC_FALSE;
713   }
714   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
715   if (flg) {
716     PetscCall(PCASMSetOverlap(pc, ovl));
717     osm->dm_subdomains = PETSC_FALSE;
718   }
719   flg = PETSC_FALSE;
720   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
721   if (flg) PetscCall(PCASMSetType(pc, asmtype));
722   flg = PETSC_FALSE;
723   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
724   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
725   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
726   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
727   PetscOptionsHeadEnd();
728   PetscFunctionReturn(PETSC_SUCCESS);
729 }
730 
731 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
732 {
733   PC_ASM  *osm = (PC_ASM *)pc->data;
734   PetscInt i;
735 
736   PetscFunctionBegin;
737   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
738   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
739 
740   if (!pc->setupcalled) {
741     if (is) {
742       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
743     }
744     if (is_local) {
745       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
746     }
747     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
748 
749     if (osm->ksp && osm->n_local_true != n) {
750       for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
751       PetscCall(PetscFree(osm->ksp));
752     }
753 
754     osm->n_local_true = n;
755     osm->is           = NULL;
756     osm->is_local     = NULL;
757     if (is) {
758       PetscCall(PetscMalloc1(n, &osm->is));
759       for (i = 0; i < n; i++) osm->is[i] = is[i];
760       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
761       osm->overlap = -1;
762     }
763     if (is_local) {
764       PetscCall(PetscMalloc1(n, &osm->is_local));
765       for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
766       if (!is) {
767         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
768         for (i = 0; i < osm->n_local_true; i++) {
769           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
770             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
771             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
772           } else {
773             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
774             osm->is[i] = osm->is_local[i];
775           }
776         }
777       }
778     }
779   }
780   PetscFunctionReturn(PETSC_SUCCESS);
781 }
782 
783 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
784 {
785   PC_ASM     *osm = (PC_ASM *)pc->data;
786   PetscMPIInt rank, size;
787   PetscInt    n;
788 
789   PetscFunctionBegin;
790   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
791   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets, they cannot be set globally yet.");
792 
793   /*
794      Split the subdomains equally among all processors
795   */
796   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
797   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
798   n = N / size + ((N % size) > rank);
799   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
800   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
801   if (!pc->setupcalled) {
802     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
803 
804     osm->n_local_true = n;
805     osm->is           = NULL;
806     osm->is_local     = NULL;
807   }
808   PetscFunctionReturn(PETSC_SUCCESS);
809 }
810 
811 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
812 {
813   PC_ASM *osm = (PC_ASM *)pc->data;
814 
815   PetscFunctionBegin;
816   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
817   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
818   if (!pc->setupcalled) osm->overlap = ovl;
819   PetscFunctionReturn(PETSC_SUCCESS);
820 }
821 
822 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
823 {
824   PC_ASM *osm = (PC_ASM *)pc->data;
825 
826   PetscFunctionBegin;
827   osm->type     = type;
828   osm->type_set = PETSC_TRUE;
829   PetscFunctionReturn(PETSC_SUCCESS);
830 }
831 
832 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
833 {
834   PC_ASM *osm = (PC_ASM *)pc->data;
835 
836   PetscFunctionBegin;
837   *type = osm->type;
838   PetscFunctionReturn(PETSC_SUCCESS);
839 }
840 
841 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
842 {
843   PC_ASM *osm = (PC_ASM *)pc->data;
844 
845   PetscFunctionBegin;
846   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
847   osm->loctype = type;
848   PetscFunctionReturn(PETSC_SUCCESS);
849 }
850 
851 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
852 {
853   PC_ASM *osm = (PC_ASM *)pc->data;
854 
855   PetscFunctionBegin;
856   *type = osm->loctype;
857   PetscFunctionReturn(PETSC_SUCCESS);
858 }
859 
860 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
861 {
862   PC_ASM *osm = (PC_ASM *)pc->data;
863 
864   PetscFunctionBegin;
865   osm->sort_indices = doSort;
866   PetscFunctionReturn(PETSC_SUCCESS);
867 }
868 
869 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
870 {
871   PC_ASM *osm = (PC_ASM *)pc->data;
872 
873   PetscFunctionBegin;
874   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
875 
876   if (n_local) *n_local = osm->n_local_true;
877   if (first_local) {
878     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
879     *first_local -= osm->n_local_true;
880   }
881   if (ksp) *ksp = osm->ksp;
882   PetscFunctionReturn(PETSC_SUCCESS);
883 }
884 
885 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
886 {
887   PC_ASM *osm = (PC_ASM *)pc->data;
888 
889   PetscFunctionBegin;
890   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
891   PetscAssertPointer(sub_mat_type, 2);
892   *sub_mat_type = osm->sub_mat_type;
893   PetscFunctionReturn(PETSC_SUCCESS);
894 }
895 
896 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
897 {
898   PC_ASM *osm = (PC_ASM *)pc->data;
899 
900   PetscFunctionBegin;
901   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
902   PetscCall(PetscFree(osm->sub_mat_type));
903   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
904   PetscFunctionReturn(PETSC_SUCCESS);
905 }
906 
907 /*@C
908   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
909 
910   Collective
911 
912   Input Parameters:
913 + pc       - the preconditioner context
914 . n        - the number of subdomains for this processor (default value = 1)
915 . is       - the index sets that define the subdomains for this processor (or `NULL` for PETSc to determine subdomains)
916              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
917 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
918              (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array
919              (not the `IS` in the array) after this call
920 
921   Options Database Key:
922 . -pc_asm_local_blocks <blks> - Sets number of local blocks
923 
924   Level: advanced
925 
926   Notes:
927   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`
928 
929   By default the `PCASM` preconditioner uses 1 block per processor.
930 
931   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
932 
933   If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
934   back to form the global solution (this is the standard restricted additive Schwarz method, RASM)
935 
936   If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
937   no code to handle that case.
938 
939 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
940           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
941 @*/
942 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
943 {
944   PetscFunctionBegin;
945   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
946   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
947   PetscFunctionReturn(PETSC_SUCCESS);
948 }
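
/*
   Illustrative usage sketch (not part of the PETSc sources): split the locally owned rows of a
   hypothetical matrix A into two contiguous blocks and pass them to a PC of type PCASM before
   PCSetUp(). The names "A", "pc", and "subis" are assumptions made for this sketch.

     PetscInt rstart, rend, mid;
     IS       subis[2];

     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     mid = rstart + (rend - rstart) / 2;
     PetscCall(ISCreateStride(PETSC_COMM_SELF, mid - rstart, rstart, 1, &subis[0]));
     PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - mid, mid, 1, &subis[1]));
     PetscCall(PCASMSetLocalSubdomains(pc, 2, subis, NULL));

   The subis[] array itself may be freed right after the call; the two index sets should be kept
   (and eventually destroyed with ISDestroy()) as described in the Notes above. PETSc can still be
   asked to extend the overlap afterwards with PCASMSetOverlap().
*/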
949 
950 /*@C
951   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
952   additive Schwarz preconditioner, `PCASM`.
953 
954   Collective, all MPI ranks must pass in the same array of `IS`
955 
956   Input Parameters:
957 + pc       - the preconditioner context
958 . N        - the number of subdomains for all processors
959 . is       - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains)
960 - is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information)
961 
962   Options Database Key:
963 . -pc_asm_blocks <blks> - Sets total blocks
964 
965   Level: advanced
966 
967   Notes:
968   Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`.
969 
970   By default the `PCASM` preconditioner uses 1 block per processor.
971 
972   These index sets cannot be destroyed until after completion of the
973   linear solves for which the `PCASM` preconditioner is being used.
974 
975   Use `PCASMSetLocalSubdomains()` to set local subdomains.
976 
977   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`.
978 
979 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
980           `PCASMCreateSubdomains2D()`, `PCGASM`
981 @*/
982 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
983 {
984   PetscFunctionBegin;
985   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
986   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
987   PetscFunctionReturn(PETSC_SUCCESS);
988 }
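
/*
   Illustrative usage sketch: request 16 subdomains in total and let PETSc split them across the
   processes ("pc" is an assumed preconditioner context).

     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetTotalSubdomains(pc, 16, NULL, NULL));

   The equivalent command-line option is -pc_asm_blocks 16.
*/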
989 
990 /*@
991   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
992   additive Schwarz preconditioner, `PCASM`.
993 
994   Logically Collective
995 
996   Input Parameters:
997 + pc  - the preconditioner context
998 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
999 
1000   Options Database Key:
1001 . -pc_asm_overlap <ovl> - Sets overlap
1002 
1003   Level: intermediate
1004 
1005   Notes:
1006   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1007   multiple blocks per perocessor, see `PCASMSetTotalSubdomains()` and
1008   multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1009 
1010   The overlap defaults to 1, so if one desires that no additional
1011   overlap be computed beyond what may have been set with a call to
1012   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then `ovl`
1013   must be set to 0.  In particular, if one does not explicitly set
1014   the subdomains in an application code, then all overlap would be computed
1015   internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1016   variant that is equivalent to the block Jacobi preconditioner.
1017 
1018   The default algorithm used by PETSc to increase overlap is fast but not scalable;
1019   use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1020 
1021   One can define initial index sets with any overlap via
1022   `PCASMSetLocalSubdomains()`; the routine
1023   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1024   if desired.
1025 
1026 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1027           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1028 @*/
1029 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1030 {
1031   PetscFunctionBegin;
1032   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1033   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1034   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1035   PetscFunctionReturn(PETSC_SUCCESS);
1036 }
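
/*
   Illustrative usage sketch ("pc" is an assumed PC of type PCASM): extend each subdomain by two
   layers of matrix-graph neighbors, or pass 0 to add no overlap at all (with the default
   one-block-per-process subdomains this reduces to block Jacobi, as noted above).

     PetscCall(PCASMSetOverlap(pc, 2));

   The equivalent command-line option is -pc_asm_overlap 2.
*/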
1037 
1038 /*@
1039   PCASMSetType - Sets the type of restriction and interpolation used
1040   for local problems in the additive Schwarz method, `PCASM`.
1041 
1042   Logically Collective
1043 
1044   Input Parameters:
1045 + pc   - the preconditioner context
1046 - type - variant of `PCASM`, one of
1047 .vb
1048       PC_ASM_BASIC       - full interpolation and restriction
1049       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1050       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1051       PC_ASM_NONE        - local processor restriction and interpolation
1052 .ve
1053 
1054   Options Database Key:
1055 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1056 
1057   Level: intermediate
1058 
1059   Note:
1060   If the `is_local` arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1061   to limit the local processor interpolation.
1062 
1063 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1064           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1065 @*/
1066 PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1067 {
1068   PetscFunctionBegin;
1069   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1070   PetscValidLogicalCollectiveEnum(pc, type, 2);
1071   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1072   PetscFunctionReturn(PETSC_SUCCESS);
1073 }
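
/*
   Illustrative usage sketch ("pc" is an assumed PC of type PCASM): switch from the default
   restricted variant to the basic variant with full restriction and interpolation.

     PetscCall(PCASMSetType(pc, PC_ASM_BASIC));

   The equivalent command-line option is -pc_asm_type basic.
*/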
1074 
1075 /*@
1076   PCASMGetType - Gets the type of restriction and interpolation used
1077   for local problems in the additive Schwarz method, `PCASM`.
1078 
1079   Logically Collective
1080 
1081   Input Parameter:
1082 . pc - the preconditioner context
1083 
1084   Output Parameter:
1085 . type - variant of `PCASM`, one of
1086 .vb
1087       PC_ASM_BASIC       - full interpolation and restriction
1088       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1089       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1090       PC_ASM_NONE        - local processor restriction and interpolation
1091 .ve
1092 
1093   Options Database Key:
1094 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1095 
1096   Level: intermediate
1097 
1098 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1099           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1100 @*/
1101 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1102 {
1103   PetscFunctionBegin;
1104   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1105   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1106   PetscFunctionReturn(PETSC_SUCCESS);
1107 }
1108 
1109 /*@
1110   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1111 
1112   Logically Collective
1113 
1114   Input Parameters:
1115 + pc   - the preconditioner context
1116 - type - type of composition, one of
1117 .vb
1118   PC_COMPOSITE_ADDITIVE       - local additive combination
1119   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1120 .ve
1121 
1122   Options Database Key:
1123 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1124 
1125   Level: intermediate
1126 
1127 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1128 @*/
1129 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1130 {
1131   PetscFunctionBegin;
1132   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1133   PetscValidLogicalCollectiveEnum(pc, type, 2);
1134   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1135   PetscFunctionReturn(PETSC_SUCCESS);
1136 }
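
/*
   Illustrative usage sketch ("pc" is an assumed PC of type PCASM): compose the local block solves
   multiplicatively instead of additively.

     PetscCall(PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE));

   The equivalent command-line option is -pc_asm_local_type multiplicative.
*/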
1137 
1138 /*@
1139   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1140 
1141   Logically Collective
1142 
1143   Input Parameter:
1144 . pc - the preconditioner context
1145 
1146   Output Parameter:
1147 . type - type of composition, one of
1148 .vb
1149   PC_COMPOSITE_ADDITIVE       - local additive combination
1150   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1151 .ve
1152 
1153   Options Database Key:
1154 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1155 
1156   Level: intermediate
1157 
1158 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1159 @*/
1160 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1161 {
1162   PetscFunctionBegin;
1163   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1164   PetscAssertPointer(type, 2);
1165   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1166   PetscFunctionReturn(PETSC_SUCCESS);
1167 }
1168 
1169 /*@
1170   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1171 
1172   Logically Collective
1173 
1174   Input Parameters:
1175 + pc     - the preconditioner context
1176 - doSort - sort the subdomain indices
1177 
1178   Level: intermediate
1179 
1180 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1181           `PCASMCreateSubdomains2D()`
1182 @*/
1183 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1184 {
1185   PetscFunctionBegin;
1186   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1187   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1188   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1189   PetscFunctionReturn(PETSC_SUCCESS);
1190 }
1191 
1192 /*@C
1193   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1194   this processor.
1195 
1196   Collective if and only if `first_local` is requested
1197 
1198   Input Parameter:
1199 . pc - the preconditioner context
1200 
1201   Output Parameters:
1202 + n_local     - the number of blocks on this processor or `NULL`
1203 . first_local - the global number of the first block on this processor or `NULL`, all processors must request or all must pass `NULL`
1204 - ksp         - the array of `KSP` contexts
1205 
1206   Level: advanced
1207 
1208   Notes:
1209   The array of `KSP` contexts returned by `PCASMGetSubKSP()` is owned by the `PC` and must not be freed by the caller.
1210 
1211   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1212 
1213   Fortran Notes:
1214   The output argument `ksp` must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1215 
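  Example Usage:
  A minimal sketch that applies the same solver settings to every local block; it assumes `ksp` is an already created `KSP` whose preconditioner has been set to `PCASM`:
.vb
  KSP     *subksp;
  PC       pc, subpc;
  PetscInt i, nlocal;

  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(KSPSetUp(ksp));
  PetscCall(PCASMGetSubKSP(pc, &nlocal, NULL, &subksp));
  for (i = 0; i < nlocal; i++) {
    PetscCall(KSPGetPC(subksp[i], &subpc));
    PetscCall(PCSetType(subpc, PCLU));
  }
.ve
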
1216 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1217           `PCASMCreateSubdomains2D()`
1218 @*/
1219 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1220 {
1221   PetscFunctionBegin;
1222   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1223   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1224   PetscFunctionReturn(PETSC_SUCCESS);
1225 }
1226 
1227 /*MC
1228    PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1229            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`
1230 
1231    Options Database Keys:
1232 +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1233 .  -pc_asm_overlap <ovl>                          - Sets overlap
1234 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1235 .  -pc_asm_dm_subdomains <bool>                   - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1236 -  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1237 
1238    Level: beginner
1239 
1240    Notes:
1241    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1242    will get different convergence rates due to the default option of `-pc_asm_type restrict`. Use
1243    `-pc_asm_type basic` to get the same convergence behavior in both cases.
1244 
1245    Each processor can have one or more blocks, but a block cannot be shared by more
1246    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1247 
1248    To set options on the solvers for each block, prepend `-sub_` to all the `KSP` and `PC`
1249    options database keys. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`
1250 
1251    To set the options on the solvers separately for each block, call `PCASMGetSubKSP()`
1252    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1253 
1254    If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains
1255 
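   Example Usage:
   A typical command line invocation might look like the following; the executable name `ex1` is only a placeholder:
.vb
   mpiexec -n 4 ./ex1 -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve
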
1256 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1257           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1258           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`
1259 M*/
1260 
1261 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1262 {
1263   PC_ASM *osm;
1264 
1265   PetscFunctionBegin;
1266   PetscCall(PetscNew(&osm));
1267 
1268   osm->n             = PETSC_DECIDE;
1269   osm->n_local       = 0;
1270   osm->n_local_true  = PETSC_DECIDE;
1271   osm->overlap       = 1;
1272   osm->ksp           = NULL;
1273   osm->restriction   = NULL;
1274   osm->lprolongation = NULL;
1275   osm->lrestriction  = NULL;
1276   osm->x             = NULL;
1277   osm->y             = NULL;
1278   osm->is            = NULL;
1279   osm->is_local      = NULL;
1280   osm->mat           = NULL;
1281   osm->pmat          = NULL;
1282   osm->type          = PC_ASM_RESTRICT;
1283   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1284   osm->sort_indices  = PETSC_TRUE;
1285   osm->dm_subdomains = PETSC_FALSE;
1286   osm->sub_mat_type  = NULL;
1287 
1288   pc->data                 = (void *)osm;
1289   pc->ops->apply           = PCApply_ASM;
1290   pc->ops->matapply        = PCMatApply_ASM;
1291   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1292   pc->ops->setup           = PCSetUp_ASM;
1293   pc->ops->reset           = PCReset_ASM;
1294   pc->ops->destroy         = PCDestroy_ASM;
1295   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1296   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1297   pc->ops->view            = PCView_ASM;
1298   pc->ops->applyrichardson = NULL;
1299 
1300   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1301   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1302   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1303   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1304   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1305   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1306   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1307   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1310   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1311   PetscFunctionReturn(PETSC_SUCCESS);
1312 }
1313 
1314 /*@C
1315   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1316   preconditioner, `PCASM`,  for any problem on a general grid.
1317 
1318   Collective
1319 
1320   Input Parameters:
1321 + A - The global matrix operator
1322 - n - the number of local blocks
1323 
1324   Output Parameter:
1325 . outis - the array of index sets defining the subdomains
1326 
1327   Level: advanced
1328 
1329   Note:
1330   This generates nonoverlapping subdomains; `PCASM` will generate the overlap
1331   from these if you use `PCASMSetLocalSubdomains()`.
1332 
1333   Fortran Notes:
1334   The array `outis[]` must be allocated by the caller, with length `n`.
1335 
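  Example Usage:
  A minimal sketch creating two nonoverlapping subdomains per process and handing them to a `PCASM`; it assumes `A` is the matrix used to build the preconditioner `pc`:
.vb
  IS *is;

  PetscCall(PCASMCreateSubdomains(A, 2, &is));
  PetscCall(PCASMSetLocalSubdomains(pc, 2, is, NULL));
  PetscCall(PCASMSetOverlap(pc, 1));
  /* ... set up the solver and solve ... */
  PetscCall(PCASMDestroySubdomains(2, is, NULL));
.ve
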
1336 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1337 @*/
1338 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1339 {
1340   MatPartitioning mpart;
1341   const char     *prefix;
1342   PetscInt        i, j, rstart, rend, bs;
1343   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1344   Mat             Ad = NULL, adj;
1345   IS              ispart, isnumb, *is;
1346 
1347   PetscFunctionBegin;
1348   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1349   PetscAssertPointer(outis, 3);
1350   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);
1351 
1352   /* Get prefix, row distribution, and block size */
1353   PetscCall(MatGetOptionsPrefix(A, &prefix));
1354   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1355   PetscCall(MatGetBlockSize(A, &bs));
1356   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1357 
1358   /* Get diagonal block from matrix if possible */
1359   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1360   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1361   if (Ad) {
1362     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1363     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1364   }
1365   if (Ad && n > 1) {
1366     PetscBool match, done;
1367     /* Try to setup a good matrix partitioning if available */
1368     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1369     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1370     PetscCall(MatPartitioningSetFromOptions(mpart));
1371     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1372     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1373     if (!match) { /* assume a "good" partitioner is available */
1374       PetscInt        na;
1375       const PetscInt *ia, *ja;
1376       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1377       if (done) {
1378         /* Build adjacency matrix by hand. Unfortunately a call to
1379            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1380            remove the block-aij structure and we cannot expect
1381            MatPartitioning to split vertices as we need */
1382         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1383         const PetscInt *row;
1384         nnz = 0;
1385         for (i = 0; i < na; i++) { /* count number of nonzeros */
1386           len = ia[i + 1] - ia[i];
1387           row = ja + ia[i];
1388           for (j = 0; j < len; j++) {
1389             if (row[j] == i) { /* don't count diagonal */
1390               len--;
1391               break;
1392             }
1393           }
1394           nnz += len;
1395         }
1396         PetscCall(PetscMalloc1(na + 1, &iia));
1397         PetscCall(PetscMalloc1(nnz, &jja));
1398         nnz    = 0;
1399         iia[0] = 0;
1400         for (i = 0; i < na; i++) { /* fill adjacency */
1401           cnt = 0;
1402           len = ia[i + 1] - ia[i];
1403           row = ja + ia[i];
1404           for (j = 0; j < len; j++) {
1405             if (row[j] != i) { /* if not diagonal */
1406               jja[nnz + cnt++] = row[j];
1407             }
1408           }
1409           nnz += cnt;
1410           iia[i + 1] = nnz;
1411         }
1412         /* Partitioning of the adjacency matrix */
1413         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1414         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1415         PetscCall(MatPartitioningSetNParts(mpart, n));
1416         PetscCall(MatPartitioningApply(mpart, &ispart));
1417         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1418         PetscCall(MatDestroy(&adj));
1419         foundpart = PETSC_TRUE;
1420       }
1421       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1422     }
1423     PetscCall(MatPartitioningDestroy(&mpart));
1424   }
1425 
1426   PetscCall(PetscMalloc1(n, &is));
1427   *outis = is;
1428 
1429   if (!foundpart) {
1430     /* Partitioning by contiguous chunks of rows */
1431 
1432     PetscInt mbs   = (rend - rstart) / bs;
1433     PetscInt start = rstart;
1434     for (i = 0; i < n; i++) {
1435       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1436       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1437       start += count;
1438     }
1439 
1440   } else {
1441     /* Partitioning by adjacency of diagonal block  */
1442 
1443     const PetscInt *numbering;
1444     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1445     /* Get node count in each partition */
1446     PetscCall(PetscMalloc1(n, &count));
1447     PetscCall(ISPartitioningCount(ispart, n, count));
1448     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1449       for (i = 0; i < n; i++) count[i] *= bs;
1450     }
1451     /* Build indices from node numbering */
1452     PetscCall(ISGetLocalSize(isnumb, &nidx));
1453     PetscCall(PetscMalloc1(nidx, &indices));
1454     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1455     PetscCall(ISGetIndices(isnumb, &numbering));
1456     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1457     PetscCall(ISRestoreIndices(isnumb, &numbering));
1458     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1459       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1460       for (i = 0; i < nidx; i++) {
1461         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1462       }
1463       PetscCall(PetscFree(indices));
1464       nidx *= bs;
1465       indices = newidx;
1466     }
1467     /* Shift to get global indices */
1468     for (i = 0; i < nidx; i++) indices[i] += rstart;
1469 
1470     /* Build the index sets for each block */
1471     for (i = 0; i < n; i++) {
1472       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1473       PetscCall(ISSort(is[i]));
1474       start += count[i];
1475     }
1476 
1477     PetscCall(PetscFree(count));
1478     PetscCall(PetscFree(indices));
1479     PetscCall(ISDestroy(&isnumb));
1480     PetscCall(ISDestroy(&ispart));
1481   }
1482   PetscFunctionReturn(PETSC_SUCCESS);
1483 }
1484 
1485 /*@C
1486   PCASMDestroySubdomains - Destroys the index sets created with
1487   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1488 
1489   Collective
1490 
1491   Input Parameters:
1492 + n        - the number of index sets
1493 . is       - the array of index sets
1494 - is_local - the array of local index sets, can be `NULL`
1495 
1496   Level: advanced
1497 
1498 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1499 @*/
1500 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1501 {
1502   PetscInt i;
1503 
1504   PetscFunctionBegin;
1505   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1506   if (is) {
1507     PetscAssertPointer(is, 2);
1508     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1509     PetscCall(PetscFree(is));
1510   }
1511   if (is_local) {
1512     PetscAssertPointer(is_local, 3);
1513     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1514     PetscCall(PetscFree(is_local));
1515   }
1516   PetscFunctionReturn(PETSC_SUCCESS);
1517 }
1518 
1519 /*@C
1520   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1521   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1522 
1523   Not Collective
1524 
1525   Input Parameters:
1526 + m       - the number of mesh points in the x direction
1527 . n       - the number of mesh points in the y direction
1528 . M       - the number of subdomains in the x direction
1529 . N       - the number of subdomains in the y direction
1530 . dof     - degrees of freedom per node
1531 - overlap - overlap in mesh lines
1532 
1533   Output Parameters:
1534 + Nsub     - the number of subdomains created
1535 . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1536 - is_local - array of index sets defining non-overlapping subdomains
1537 
1538   Level: advanced
1539 
1540   Note:
1541   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1542   preconditioners.  More general related routines are
1543   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1544 
1545   Fortran Notes:
1546   The `IS` output arguments must be declared as arrays long enough to hold `Nsub` entries
1547 
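  Example Usage:
  A minimal sketch decomposing a 64 x 64 grid (one degree of freedom per node) into a 4 x 4 array of subdomains with one grid line of overlap; it assumes `pc` is a sequential `PCASM`:
.vb
  PetscInt Nsub;
  IS      *is, *is_local;

  PetscCall(PCASMCreateSubdomains2D(64, 64, 4, 4, 1, 1, &Nsub, &is, &is_local));
  PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
.ve
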
1548 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1549           `PCASMSetOverlap()`
1550 @*/
1551 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
1552 {
1553   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1554   PetscInt nidx, *idx, loc, ii, jj, count;
1555 
1556   PetscFunctionBegin;
1557   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");
1558 
1559   *Nsub = N * M;
1560   PetscCall(PetscMalloc1(*Nsub, is));
1561   PetscCall(PetscMalloc1(*Nsub, is_local));
1562   ystart    = 0;
1563   loc_outer = 0;
1564   for (i = 0; i < N; i++) {
1565     height = n / N + ((n % N) > i); /* height of subdomain */
1566     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1567     yleft = ystart - overlap;
1568     if (yleft < 0) yleft = 0;
1569     yright = ystart + height + overlap;
1570     if (yright > n) yright = n;
1571     xstart = 0;
1572     for (j = 0; j < M; j++) {
1573       width = m / M + ((m % M) > j); /* width of subdomain */
1574       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1575       xleft = xstart - overlap;
1576       if (xleft < 0) xleft = 0;
1577       xright = xstart + width + overlap;
1578       if (xright > m) xright = m;
1579       nidx = (xright - xleft) * (yright - yleft);
1580       PetscCall(PetscMalloc1(nidx, &idx));
1581       loc = 0;
1582       for (ii = yleft; ii < yright; ii++) {
1583         count = m * ii + xleft;
1584         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1585       }
1586       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1587       if (overlap == 0) {
1588         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));
1589 
1590         (*is_local)[loc_outer] = (*is)[loc_outer];
1591       } else {
1592         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1593           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1594         }
1595         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1596       }
1597       PetscCall(PetscFree(idx));
1598       xstart += width;
1599       loc_outer++;
1600     }
1601     ystart += height;
1602   }
1603   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1604   PetscFunctionReturn(PETSC_SUCCESS);
1605 }
1606 
1607 /*@C
1608   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1609   only) for the additive Schwarz preconditioner, `PCASM`.
1610 
1611   Not Collective
1612 
1613   Input Parameter:
1614 . pc - the preconditioner context
1615 
1616   Output Parameters:
1617 + n        - if requested, the number of subdomains for this processor (default value = 1)
1618 . is       - if requested, the index sets that define the subdomains for this processor
1619 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1620 
1621   Level: advanced
1622 
1623   Note:
1624   The `IS` numbering is in the parallel, global numbering of the vector.
1625 
1626 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1627           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1628 @*/
1629 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1630 {
1631   PC_ASM   *osm = (PC_ASM *)pc->data;
1632   PetscBool match;
1633 
1634   PetscFunctionBegin;
1635   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1636   if (n) PetscAssertPointer(n, 2);
1637   if (is) PetscAssertPointer(is, 3);
1638   if (is_local) PetscAssertPointer(is_local, 4);
1639   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1640   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1641   if (n) *n = osm->n_local_true;
1642   if (is) *is = osm->is;
1643   if (is_local) *is_local = osm->is_local;
1644   PetscFunctionReturn(PETSC_SUCCESS);
1645 }
1646 
1647 /*@C
1648   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1649   only) for the additive Schwarz preconditioner, `PCASM`.
1650 
1651   Not Collective
1652 
1653   Input Parameter:
1654 . pc - the preconditioner context
1655 
1656   Output Parameters:
1657 + n   - if requested, the number of matrices for this processor (default value = 1)
1658 - mat - if requested, the matrices
1659 
1660   Level: advanced
1661 
1662   Notes:
1663   Call this routine after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`
1664 
1665   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1666 
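  Example Usage:
  A minimal sketch that views the first local subdomain matrix; it assumes `ksp` is an already set up `KSP` whose preconditioner is a `PCASM`:
.vb
  PC       pc;
  PetscInt nsub;
  Mat     *submats;

  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCASMGetLocalSubmatrices(pc, &nsub, &submats));
  if (nsub > 0) PetscCall(MatView(submats[0], PETSC_VIEWER_STDOUT_SELF));
.ve
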
1667 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1668           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1669 @*/
1670 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1671 {
1672   PC_ASM   *osm;
1673   PetscBool match;
1674 
1675   PetscFunctionBegin;
1676   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1677   if (n) PetscAssertPointer(n, 2);
1678   if (mat) PetscAssertPointer(mat, 3);
1679   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1680   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1681   if (!match) {
1682     if (n) *n = 0;
1683     if (mat) *mat = NULL;
1684   } else {
1685     osm = (PC_ASM *)pc->data;
1686     if (n) *n = osm->n_local_true;
1687     if (mat) *mat = osm->pmat;
1688   }
1689   PetscFunctionReturn(PETSC_SUCCESS);
1690 }
1691 
1692 /*@
1693   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1694 
1695   Logically Collective
1696 
1697   Input Parameters:
1698 + pc  - the preconditioner
1699 - flg - boolean indicating whether to use subdomains defined by the `DM`
1700 
1701   Options Database Key:
1702 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1703 
1704   Level: intermediate
1705 
1706   Note:
1707   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1708   so setting either of the first two effectively turns the latter off.
1709 
1710   Developer Note:
1711   This should be `PCASMSetUseDMSubdomains()`; similarly for the options database key.
1712 
1713 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1714           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1715 @*/
1716 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1717 {
1718   PC_ASM   *osm = (PC_ASM *)pc->data;
1719   PetscBool match;
1720 
1721   PetscFunctionBegin;
1722   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1723   PetscValidLogicalCollectiveBool(pc, flg, 2);
1724   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1725   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1726   if (match) osm->dm_subdomains = flg;
1727   PetscFunctionReturn(PETSC_SUCCESS);
1728 }
1729 
1730 /*@
1731   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1732 
1733   Not Collective
1734 
1735   Input Parameter:
1736 . pc - the preconditioner
1737 
1738   Output Parameter:
1739 . flg - boolean indicating whether to use subdomains defined by the `DM`
1740 
1741   Level: intermediate
1742 
1743   Developer Note:
1744   This should be `PCASMGetUseDMSubdomains()`
1745 
1746 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1747           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1748 @*/
1749 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1750 {
1751   PC_ASM   *osm = (PC_ASM *)pc->data;
1752   PetscBool match;
1753 
1754   PetscFunctionBegin;
1755   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1756   PetscAssertPointer(flg, 2);
1757   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1758   if (match) *flg = osm->dm_subdomains;
1759   else *flg = PETSC_FALSE;
1760   PetscFunctionReturn(PETSC_SUCCESS);
1761 }
1762 
1763 /*@C
1764   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1765 
1766   Not Collective
1767 
1768   Input Parameter:
1769 . pc - the `PC`
1770 
1771   Output Parameter:
1772 . sub_mat_type - name of matrix type
1773 
1774   Level: advanced
1775 
1776 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1777 @*/
1778 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1779 {
1780   PetscFunctionBegin;
1781   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1782   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1783   PetscFunctionReturn(PETSC_SUCCESS);
1784 }
1785 
1786 /*@C
1787   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1788 
1789   Collective
1790 
1791   Input Parameters:
1792 + pc           - the `PC` object
1793 - sub_mat_type - the `MatType`
1794 
1795   Options Database Key:
1796 . -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1797    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1798 
1799   Note:
1800   See `MatType` for available types
1801 
1802   Level: advanced
1803 
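  Example Usage:
  A minimal sketch, assuming PETSc was configured with ViennaCL support and `pc` is a `PCASM`:
.vb
  PetscCall(PCASMSetSubMatType(pc, MATSEQAIJVIENNACL));
.ve
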
1804 .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1805 @*/
1806 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1807 {
1808   PetscFunctionBegin;
1809   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1810   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1811   PetscFunctionReturn(PETSC_SUCCESS);
1812 }
1813