xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 976ed0a4a7e2c9a504a034b07aaf489e2c2d55c5)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include <petsc/private/matimpl.h>
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         iascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (iascii) {
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(viewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(viewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
69     }
70   } else if (isstring) {
71     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
72     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
73     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
74     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
75   }
76   PetscFunctionReturn(PETSC_SUCCESS);
77 }
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 static PetscErrorCode PCSetUp_ASM(PC pc)
154 {
155   PC_ASM     *osm = (PC_ASM *)pc->data;
156   PetscBool   flg;
157   PetscInt    i, m, m_local;
158   MatReuse    scall = MAT_REUSE_MATRIX;
159   IS          isl;
160   KSP         ksp;
161   PC          subpc;
162   const char *prefix, *pprefix;
163   Vec         vec;
164   DM         *domain_dm = NULL;
165 
166   PetscFunctionBegin;
167   if (!pc->setupcalled) {
168     PetscInt m;
169 
170     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
171     if (osm->n_local_true == PETSC_DECIDE) {
172       /* no subdomains given */
173       /* try pc->dm first, if allowed */
174       if (osm->dm_subdomains && pc->dm) {
175         PetscInt num_domains, d;
176         char   **domain_names;
177         IS      *inner_domain_is, *outer_domain_is;
178         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
179         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
180                               A future improvement of this code might allow one to use
181                               DM-defined subdomains and also increase the overlap,
182                               but that is not currently supported */
183         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
184         for (d = 0; d < num_domains; ++d) {
185           if (domain_names) PetscCall(PetscFree(domain_names[d]));
186           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
187           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
188         }
189         PetscCall(PetscFree(domain_names));
190         PetscCall(PetscFree(inner_domain_is));
191         PetscCall(PetscFree(outer_domain_is));
192       }
193       if (osm->n_local_true == PETSC_DECIDE) {
194         /* still no subdomains; use one subdomain per processor */
195         osm->n_local_true = 1;
196       }
197     }
198     { /* determine the global and max number of subdomains */
199       struct {
200         PetscInt max, sum;
201       } inwork, outwork;
202       PetscMPIInt size;
203 
204       inwork.max = osm->n_local_true;
205       inwork.sum = osm->n_local_true;
206       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
207       osm->n_local = outwork.max;
208       osm->n       = outwork.sum;
209 
210       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
211       if (outwork.max == 1 && outwork.sum == size) {
212         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
213         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
214       }
215     }
216     if (!osm->is) { /* create the index sets */
217       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
218     }
219     if (osm->n_local_true > 1 && !osm->is_local) {
220       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
221       for (i = 0; i < osm->n_local_true; i++) {
222         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
223           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
224           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
225         } else {
226           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
227           osm->is_local[i] = osm->is[i];
228         }
229       }
230     }
231     PetscCall(PCGetOptionsPrefix(pc, &prefix));
232     if (osm->overlap > 0) {
233       /* Extend the "overlapping" regions by a number of steps */
234       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
235     }
236     if (osm->sort_indices) {
237       for (i = 0; i < osm->n_local_true; i++) {
238         PetscCall(ISSort(osm->is[i]));
239         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
240       }
241     }
242     flg = PETSC_FALSE;
243     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
244     if (flg) PetscCall(PCASMPrintSubdomains(pc));
245     if (!osm->ksp) {
246       /* Create the local solvers */
247       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
248       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
249       for (i = 0; i < osm->n_local_true; i++) {
250         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
251         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
252         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
253         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
254         PetscCall(KSPSetType(ksp, KSPPREONLY));
255         PetscCall(KSPGetPC(ksp, &subpc));
256         PetscCall(PCGetOptionsPrefix(pc, &prefix));
257         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
258         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
259         if (domain_dm) {
260           PetscCall(KSPSetDM(ksp, domain_dm[i]));
261           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
262           PetscCall(DMDestroy(&domain_dm[i]));
263         }
264         osm->ksp[i] = ksp;
265       }
266       if (domain_dm) PetscCall(PetscFree(domain_dm));
267     }
268 
269     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
270     PetscCall(ISSortRemoveDups(osm->lis));
271     PetscCall(ISGetLocalSize(osm->lis, &m));
272 
273     scall = MAT_INITIAL_MATRIX;
274   } else {
275     /*
276        Destroy the blocks from the previous iteration
277     */
278     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
279       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
280       scall = MAT_INITIAL_MATRIX;
281     }
282   }
283 
284   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
285   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
286     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
287     scall = MAT_INITIAL_MATRIX;
288   }
289 
290   /*
291      Extract out the submatrices
292   */
293   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
294   if (scall == MAT_INITIAL_MATRIX) {
295     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
296     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
297   }
298 
299   /* Convert the types of the submatrices (if needbe) */
300   if (osm->sub_mat_type) {
301     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
302   }
303 
304   if (!pc->setupcalled) {
305     VecType vtype;
306 
307     /* Create the local work vectors (from the local matrices) and scatter contexts */
308     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
309 
310     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
311     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
312     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
313     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
314     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
315 
316     PetscCall(ISGetLocalSize(osm->lis, &m));
317     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
318     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
319     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
320     PetscCall(VecSetSizes(osm->lx, m, m));
321     PetscCall(VecSetType(osm->lx, vtype));
322     PetscCall(VecDuplicate(osm->lx, &osm->ly));
323     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
324     PetscCall(ISDestroy(&isl));
325 
326     for (i = 0; i < osm->n_local_true; ++i) {
327       ISLocalToGlobalMapping ltog;
328       IS                     isll;
329       const PetscInt        *idx_is;
330       PetscInt              *idx_lis, nout;
331 
332       PetscCall(ISGetLocalSize(osm->is[i], &m));
333       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
334       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
335 
336       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
337       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
338       PetscCall(ISGetLocalSize(osm->is[i], &m));
339       PetscCall(ISGetIndices(osm->is[i], &idx_is));
340       PetscCall(PetscMalloc1(m, &idx_lis));
341       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
342       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
343       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
344       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
345       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
346       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
347       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
348       PetscCall(ISDestroy(&isll));
349       PetscCall(ISDestroy(&isl));
350       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
351         ISLocalToGlobalMapping ltog;
352         IS                     isll, isll_local;
353         const PetscInt        *idx_local;
354         PetscInt              *idx1, *idx2, nout;
355 
356         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
357         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
358 
359         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
360         PetscCall(PetscMalloc1(m_local, &idx1));
361         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
362         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
363         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
364         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
365 
366         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
367         PetscCall(PetscMalloc1(m_local, &idx2));
368         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
369         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
370         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
371         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
372 
373         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
374         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
375 
376         PetscCall(ISDestroy(&isll));
377         PetscCall(ISDestroy(&isll_local));
378       }
379     }
380     PetscCall(VecDestroy(&vec));
381   }
382 
383   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
384     IS      *cis;
385     PetscInt c;
386 
387     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
388     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
389     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
390     PetscCall(PetscFree(cis));
391   }
392 
393   /* Return control to the user so that the submatrices can be modified (e.g., to apply
394      different boundary conditions for the submatrices than for the global problem) */
395   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
396 
397   /*
398      Loop over subdomains putting them into local ksp
399   */
400   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
401   for (i = 0; i < osm->n_local_true; i++) {
402     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
403     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
404     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
405   }
406   PetscFunctionReturn(PETSC_SUCCESS);
407 }
408 
409 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
410 {
411   PC_ASM            *osm = (PC_ASM *)pc->data;
412   PetscInt           i;
413   KSPConvergedReason reason;
414 
415   PetscFunctionBegin;
416   for (i = 0; i < osm->n_local_true; i++) {
417     PetscCall(KSPSetUp(osm->ksp[i]));
418     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
419     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
420   }
421   PetscFunctionReturn(PETSC_SUCCESS);
422 }
423 
424 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
425 {
426   PC_ASM     *osm = (PC_ASM *)pc->data;
427   PetscInt    i, n_local_true = osm->n_local_true;
428   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
429 
430   PetscFunctionBegin;
431   /*
432      support for limiting the restriction or interpolation to only local
433      subdomain values (leaving the other values 0).
434   */
435   if (!(osm->type & PC_ASM_RESTRICT)) {
436     forward = SCATTER_FORWARD_LOCAL;
437     /* have to zero the work RHS since scatter may leave some slots empty */
438     PetscCall(VecSet(osm->lx, 0.0));
439   }
440   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
441 
442   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
443   /* zero the global and the local solutions */
444   PetscCall(VecSet(y, 0.0));
445   PetscCall(VecSet(osm->ly, 0.0));
446 
447   /* copy the global RHS to local RHS including the ghost nodes */
448   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
449   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
450 
451   /* restrict local RHS to the overlapping 0-block RHS */
452   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
453   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
454 
455   /* do the local solves */
456   for (i = 0; i < n_local_true; ++i) {
457     /* solve the overlapping i-block */
458     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
459     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
460     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
461     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
462 
463     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
464       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
465       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
466     } else { /* interpolate the overlapping i-block solution to the local solution */
467       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
468       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
469     }
470 
471     if (i < n_local_true - 1) {
472       /* restrict local RHS to the overlapping (i+1)-block RHS */
473       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
474       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
475 
476       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
477         /* update the overlapping (i+1)-block RHS using the current local solution */
478         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
479         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
480       }
481     }
482   }
483   /* add the local solution to the global solution including the ghost nodes */
484   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
485   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
486   PetscFunctionReturn(PETSC_SUCCESS);
487 }
488 
489 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
490 {
491   PC_ASM     *osm = (PC_ASM *)pc->data;
492   Mat         Z, W;
493   Vec         x;
494   PetscInt    i, m, N;
495   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
496 
497   PetscFunctionBegin;
498   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
499   /*
500      support for limiting the restriction or interpolation to only local
501      subdomain values (leaving the other values 0).
502   */
503   if (!(osm->type & PC_ASM_RESTRICT)) {
504     forward = SCATTER_FORWARD_LOCAL;
505     /* have to zero the work RHS since scatter may leave some slots empty */
506     PetscCall(VecSet(osm->lx, 0.0));
507   }
508   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
509   PetscCall(VecGetLocalSize(osm->x[0], &m));
510   PetscCall(MatGetSize(X, NULL, &N));
511   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
512 
513   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
514   /* zero the global and the local solutions */
515   PetscCall(MatZeroEntries(Y));
516   PetscCall(VecSet(osm->ly, 0.0));
517 
518   for (i = 0; i < N; ++i) {
519     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
520     /* copy the global RHS to local RHS including the ghost nodes */
521     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
522     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
523     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
524 
525     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
526     /* restrict local RHS to the overlapping 0-block RHS */
527     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
528     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
529     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
530   }
531   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
532   /* solve the overlapping 0-block */
533   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
534   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
535   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
536   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
537   PetscCall(MatDestroy(&Z));
538 
539   for (i = 0; i < N; ++i) {
540     PetscCall(VecSet(osm->ly, 0.0));
541     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
542     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
543       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
544       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
545     } else { /* interpolate the overlapping 0-block solution to the local solution */
546       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
547       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
548     }
549     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
550 
551     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
552     /* add the local solution to the global solution including the ghost nodes */
553     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
554     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
555     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
556   }
557   PetscCall(MatDestroy(&W));
558   PetscFunctionReturn(PETSC_SUCCESS);
559 }
560 
561 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
562 {
563   PC_ASM     *osm = (PC_ASM *)pc->data;
564   PetscInt    i, n_local_true = osm->n_local_true;
565   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
566 
567   PetscFunctionBegin;
568   /*
569      Support for limiting the restriction or interpolation to only local
570      subdomain values (leaving the other values 0).
571 
572      Note: these are reversed from the PCApply_ASM() because we are applying the
573      transpose of the three terms
574   */
575 
576   if (!(osm->type & PC_ASM_INTERPOLATE)) {
577     forward = SCATTER_FORWARD_LOCAL;
578     /* have to zero the work RHS since scatter may leave some slots empty */
579     PetscCall(VecSet(osm->lx, 0.0));
580   }
581   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
582 
583   /* zero the global and the local solutions */
584   PetscCall(VecSet(y, 0.0));
585   PetscCall(VecSet(osm->ly, 0.0));
586 
587   /* Copy the global RHS to local RHS including the ghost nodes */
588   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
589   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
590 
591   /* Restrict local RHS to the overlapping 0-block RHS */
592   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
593   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
594 
595   /* do the local solves */
596   for (i = 0; i < n_local_true; ++i) {
597     /* solve the overlapping i-block */
598     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
599     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
600     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
601     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
602 
603     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
604       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
605       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
606     } else { /* interpolate the overlapping i-block solution to the local solution */
607       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
608       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
609     }
610 
611     if (i < n_local_true - 1) {
612       /* Restrict local RHS to the overlapping (i+1)-block RHS */
613       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
614       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
615     }
616   }
617   /* Add the local solution to the global solution including the ghost nodes */
618   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
619   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
620   PetscFunctionReturn(PETSC_SUCCESS);
621 }
622 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* Reset (do not destroy) the subdomain solvers; the KSP objects themselves are
     kept so they can be reused on the next PCSetUp(), and are only freed in PCDestroy_ASM() */
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
  }
  /* Free the extracted overlapping submatrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
  }
  /* osm->lrestriction doubles as the "scatters/work vectors were created" flag;
     osm->restriction, osm->x[], osm->y[] only exist when it is non-NULL */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i = 0; i < osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      /* lprolongation is optional (only built for some PCASMType settings) */
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));
  }
  /* Destroy the subdomain index sets and the local (ghosted) work objects */
  PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* lmats is only created for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
660 
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* PCReset_ASM() frees everything except the KSP array, which is destroyed here */
  PetscCall(PCReset_ASM(pc));
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
    PetscCall(PetscFree(osm->ksp));
  }
  PetscCall(PetscFree(pc->data));

  /* Remove the composed methods registered in PCCreate_ASM() so a subsequent
     PCSetType() to another type does not see stale PCASM entry points */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
687 
688 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
689 {
690   PC_ASM         *osm = (PC_ASM *)pc->data;
691   PetscInt        blocks, ovl;
692   PetscBool       flg;
693   PCASMType       asmtype;
694   PCCompositeType loctype;
695   char            sub_mat_type[256];
696 
697   PetscFunctionBegin;
698   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
699   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
700   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
701   if (flg) {
702     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
703     osm->dm_subdomains = PETSC_FALSE;
704   }
705   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
706   if (flg) {
707     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
708     osm->dm_subdomains = PETSC_FALSE;
709   }
710   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
711   if (flg) {
712     PetscCall(PCASMSetOverlap(pc, ovl));
713     osm->dm_subdomains = PETSC_FALSE;
714   }
715   flg = PETSC_FALSE;
716   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
717   if (flg) PetscCall(PCASMSetType(pc, asmtype));
718   flg = PETSC_FALSE;
719   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
720   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
721   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
722   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
723   PetscOptionsHeadEnd();
724   PetscFunctionReturn(PETSC_SUCCESS);
725 }
726 
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  /* After setup, only a redundant call (same n, no new index sets) is tolerated */
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the stored ones, in case the
       caller passed (some of) the very IS objects already held by this PC */
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    /* If the number of blocks changes, the old per-block solvers are no longer valid */
    if (osm->ksp && osm->n_local_true != n) {
      for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
      PetscCall(PetscFree(osm->ksp));
    }

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local was given: derive osm->is from it */
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            /* take an independent copy so enlarging osm->is[i] later leaves is_local[i] untouched */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            /* zero overlap: the two sets stay identical, so share the object by reference */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
778 
779 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
780 {
781   PC_ASM     *osm = (PC_ASM *)pc->data;
782   PetscMPIInt rank, size;
783   PetscInt    n;
784 
785   PetscFunctionBegin;
786   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
787   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
788 
789   /*
790      Split the subdomains equally among all processors
791   */
792   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
793   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
794   n = N / size + ((N % size) > rank);
795   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
796   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
797   if (!pc->setupcalled) {
798     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
799 
800     osm->n_local_true = n;
801     osm->is           = NULL;
802     osm->is_local     = NULL;
803   }
804   PetscFunctionReturn(PETSC_SUCCESS);
805 }
806 
807 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
808 {
809   PC_ASM *osm = (PC_ASM *)pc->data;
810 
811   PetscFunctionBegin;
812   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
813   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
814   if (!pc->setupcalled) osm->overlap = ovl;
815   PetscFunctionReturn(PETSC_SUCCESS);
816 }
817 
818 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
819 {
820   PC_ASM *osm = (PC_ASM *)pc->data;
821 
822   PetscFunctionBegin;
823   osm->type     = type;
824   osm->type_set = PETSC_TRUE;
825   PetscFunctionReturn(PETSC_SUCCESS);
826 }
827 
828 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
829 {
830   PC_ASM *osm = (PC_ASM *)pc->data;
831 
832   PetscFunctionBegin;
833   *type = osm->type;
834   PetscFunctionReturn(PETSC_SUCCESS);
835 }
836 
837 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
838 {
839   PC_ASM *osm = (PC_ASM *)pc->data;
840 
841   PetscFunctionBegin;
842   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
843   osm->loctype = type;
844   PetscFunctionReturn(PETSC_SUCCESS);
845 }
846 
847 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
848 {
849   PC_ASM *osm = (PC_ASM *)pc->data;
850 
851   PetscFunctionBegin;
852   *type = osm->loctype;
853   PetscFunctionReturn(PETSC_SUCCESS);
854 }
855 
856 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
857 {
858   PC_ASM *osm = (PC_ASM *)pc->data;
859 
860   PetscFunctionBegin;
861   osm->sort_indices = doSort;
862   PetscFunctionReturn(PETSC_SUCCESS);
863 }
864 
static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  /* n_local_true >= 1 only once the subdomains have been established by PCSetUp() */
  PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* Collective branch: inclusive prefix sum of block counts over ranks, minus this
       rank's own count, gives the global index of this rank's first block.  All
       ranks must request first_local or none, or the MPI_Scan will deadlock. */
    PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  /* The returned array is borrowed; it remains owned by the PC and must not be freed */
  if (ksp) *ksp = osm->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
880 
881 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
882 {
883   PC_ASM *osm = (PC_ASM *)pc->data;
884 
885   PetscFunctionBegin;
886   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
887   PetscAssertPointer(sub_mat_type, 2);
888   *sub_mat_type = osm->sub_mat_type;
889   PetscFunctionReturn(PETSC_SUCCESS);
890 }
891 
892 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
893 {
894   PC_ASM *osm = (PC_ASM *)pc->data;
895 
896   PetscFunctionBegin;
897   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
898   PetscCall(PetscFree(osm->sub_mat_type));
899   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
900   PetscFunctionReturn(PETSC_SUCCESS);
901 }
902 
903 /*@C
904   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
905 
906   Collective
907 
908   Input Parameters:
909 + pc       - the preconditioner context
910 . n        - the number of subdomains for this processor (default value = 1)
911 . is       - the index set that defines the subdomains for this processor
912          (or `NULL` for PETSc to determine subdomains)
913 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
914          (or `NULL` to not provide these)
915 
916   Options Database Key:
917 . -pc_asm_local_blocks <blks> - Sets number of local blocks
918 
919   Level: advanced
920 
921   Notes:
922   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
923 
924   By default the `PCASM` preconditioner uses 1 block per processor.
925 
926   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
927 
928   If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
929   back to form the global solution (this is the standard restricted additive Schwarz method)
930 
931   If the is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
932   no code to handle that case.
933 
934 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
935           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
936 @*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Forward to the PCASM implementation; a silent no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
944 
945 /*@C
946   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
947   additive Schwarz preconditioner, `PCASM`.
948 
949   Collective, all MPI ranks must pass in the same array of `IS`
950 
951   Input Parameters:
952 + pc       - the preconditioner context
953 . N        - the number of subdomains for all processors
954 . is       - the index sets that define the subdomains for all processors
955          (or `NULL` to ask PETSc to determine the subdomains)
956 - is_local - the index sets that define the local part of the subdomains for this processor
957          (or `NULL` to not provide this information)
958 
959   Options Database Key:
960 . -pc_asm_blocks <blks> - Sets total blocks
961 
962   Level: advanced
963 
964   Notes:
965   Currently you cannot use this to set the actual subdomains with the argument is or is_local.
966 
967   By default the `PCASM` preconditioner uses 1 block per processor.
968 
969   These index sets cannot be destroyed until after completion of the
970   linear solves for which the `PCASM` preconditioner is being used.
971 
972   Use `PCASMSetLocalSubdomains()` to set local subdomains.
973 
974   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
975 
976 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
977           `PCASMCreateSubdomains2D()`, `PCGASM`
978 @*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Forward to the PCASM implementation; a silent no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
986 
987 /*@
988   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
989   additive Schwarz preconditioner, `PCASM`.
990 
991   Logically Collective
992 
993   Input Parameters:
994 + pc  - the preconditioner context
995 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
996 
997   Options Database Key:
998 . -pc_asm_overlap <ovl> - Sets overlap
999 
1000   Level: intermediate
1001 
1002   Notes:
1003   By default the `PCASM` preconditioner uses 1 block per processor.  To use
  multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1005   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1006 
1007   The overlap defaults to 1, so if one desires that no additional
1008   overlap be computed beyond what may have been set with a call to
1009   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1010   must be set to be 0.  In particular, if one does not explicitly set
  the subdomains in an application code, then all overlap would be computed
1012   internally by PETSc, and using an overlap of 0 would result in an `PCASM`
1013   variant that is equivalent to the block Jacobi preconditioner.
1014 
1015   The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1016   use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1017 
1018   One can define initial index sets with any overlap via
1019   `PCASMSetLocalSubdomains()`; the routine
1020   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1021   if desired.
1022 
1023 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1024           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1025 @*/
PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, ovl, 2);
  /* Forward to the PCASM implementation; a silent no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1034 
1035 /*@
1036   PCASMSetType - Sets the type of restriction and interpolation used
1037   for local problems in the additive Schwarz method, `PCASM`.
1038 
1039   Logically Collective
1040 
1041   Input Parameters:
1042 + pc   - the preconditioner context
1043 - type - variant of `PCASM`, one of
1044 .vb
1045       PC_ASM_BASIC       - full interpolation and restriction
1046       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1047       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1048       PC_ASM_NONE        - local processor restriction and interpolation
1049 .ve
1050 
1051   Options Database Key:
1052 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1053 
1054   Level: intermediate
1055 
1056   Note:
1057   if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1058   to limit the local processor interpolation
1059 
1060 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1061           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1062 @*/
PetscErrorCode PCASMSetType(PC pc, PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Forward to the PCASM implementation; a silent no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1071 
1072 /*@
1073   PCASMGetType - Gets the type of restriction and interpolation used
1074   for local problems in the additive Schwarz method, `PCASM`.
1075 
1076   Logically Collective
1077 
1078   Input Parameter:
1079 . pc - the preconditioner context
1080 
1081   Output Parameter:
1082 . type - variant of `PCASM`, one of
1083 .vb
1084       PC_ASM_BASIC       - full interpolation and restriction
1085       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1086       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1087       PC_ASM_NONE        - local processor restriction and interpolation
1088 .ve
1089 
1090   Options Database Key:
1091 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1092 
1093   Level: intermediate
1094 
1095 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1096           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1097 @*/
PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc does not provide this method */
  PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1105 
1106 /*@
1107   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1108 
1109   Logically Collective
1110 
1111   Input Parameters:
1112 + pc   - the preconditioner context
1113 - type - type of composition, one of
1114 .vb
1115   PC_COMPOSITE_ADDITIVE       - local additive combination
1116   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1117 .ve
1118 
1119   Options Database Key:
1120 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1121 
1122   Level: intermediate
1123 
1124 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1125 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Forward to the PCASM implementation; a silent no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1134 
1135 /*@
1136   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1137 
1138   Logically Collective
1139 
1140   Input Parameter:
1141 . pc - the preconditioner context
1142 
1143   Output Parameter:
1144 . type - type of composition, one of
1145 .vb
1146   PC_COMPOSITE_ADDITIVE       - local additive combination
1147   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1148 .ve
1149 
1150   Options Database Key:
1151 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1152 
1153   Level: intermediate
1154 
1155 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1156 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc does not provide this method */
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1165 
1166 /*@
1167   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1168 
1169   Logically Collective
1170 
1171   Input Parameters:
1172 + pc     - the preconditioner context
1173 - doSort - sort the subdomain indices
1174 
1175   Level: intermediate
1176 
1177 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1178           `PCASMCreateSubdomains2D()`
1179 @*/
PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, doSort, 2);
  /* Forward to the PCASM implementation; a silent no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1188 
1189 /*@C
1190   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1191   this processor.
1192 
1193   Collective iff first_local is requested
1194 
1195   Input Parameter:
1196 . pc - the preconditioner context
1197 
1198   Output Parameters:
1199 + n_local     - the number of blocks on this processor or NULL
1200 . first_local - the global number of the first block on this processor or NULL,
1201                  all processors must request or all must pass NULL
1202 - ksp         - the array of `KSP` contexts
1203 
1204   Level: advanced
1205 
1206   Notes:
1207   After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.
1208 
1209   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1210 
1211   Fortran Notes:
1212   The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1213 
1214 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1215           `PCASMCreateSubdomains2D()`,
1216 @*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Collective iff first_local is requested (the implementation performs an MPI_Scan);
     PetscUseMethod errors if pc does not provide this method */
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1224 
1225 /*MC
1226    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1227            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`
1228 
1229    Options Database Keys:
1230 +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1231 .  -pc_asm_overlap <ovl>                          - Sets overlap
1232 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1233 -  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1234 
1235    Level: beginner
1236 
1237    Notes:
1238    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1239    will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1240     -pc_asm_type basic to get the same convergence behavior
1241 
1242    Each processor can have one or more blocks, but a block cannot be shared by more
1243    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1244 
1245    To set options on the solvers for each block append -sub_ to all the `KSP`, and `PC`
1246    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1247 
1248    To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
1249    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1250 
1251 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1252           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1253           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1254 M*/
1255 
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscCall(PetscNew(&osm));

  /* Default configuration: PETSc decides block counts, one layer of overlap,
     restricted additive Schwarz with additive local composition */
  osm->n             = PETSC_DECIDE;
  osm->n_local       = 0;
  osm->n_local_true  = PETSC_DECIDE;
  osm->overlap       = 1;
  osm->ksp           = NULL;
  osm->restriction   = NULL;
  osm->lprolongation = NULL;
  osm->lrestriction  = NULL;
  osm->x             = NULL;
  osm->y             = NULL;
  osm->is            = NULL;
  osm->is_local      = NULL;
  osm->mat           = NULL;
  osm->pmat          = NULL;
  osm->type          = PC_ASM_RESTRICT;
  osm->loctype       = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices  = PETSC_TRUE;
  osm->dm_subdomains = PETSC_FALSE;
  osm->sub_mat_type  = NULL;

  /* Install the PCASM implementations of the generic PC operations */
  pc->data                 = (void *)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* Compose the type-specific methods so the public PCASMXxx() wrappers can dispatch here */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1308 
/*@C
  PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
  preconditioner, `PCASM`,  for any problem on a general grid.

  Collective

  Input Parameters:
+ A - The global matrix operator
- n - the number of local blocks

  Output Parameter:
. outis - the array of index sets defining the subdomains

  Level: advanced

  Note:
  This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
  from these if you use `PCASMSetLocalSubdomains()`

  The array and the index sets it contains should be destroyed with `PCASMDestroySubdomains()`

  Fortran Notes:
  You must provide the array outis[] already allocated of length n.

.seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
@*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscAssertPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  /* the local row range must be block aligned, otherwise splitting by blocks below would tear blocks apart */
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    /* detect (S)BAIJ so the adjacency below can be built on blocks rather than scalar rows */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* "current" and "square" partitioners are trivial; only use a real graph partitioner */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE(review): iia/jja appear to be taken over by the MATMPIADJ matrix (they are never freed
           here) and are released when adj is destroyed — confirm against the MatCreateMPIAdj contract */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* array of n index sets handed back to the caller; freed by PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs;
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      /* mbs/n blocks each, with the first (mbs % n) chunks taking one extra block */
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    /* sorting by the new numbering groups the local rows of each partition contiguously in indices[] */
    PetscCall(ISGetIndices(isnumb, &numbering));
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs consecutive scalar row indices */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      /* count[i] consecutive entries of the permuted index array form subdomain i */
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1479 
1480 /*@C
1481   PCASMDestroySubdomains - Destroys the index sets created with
1482   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1483 
1484   Collective
1485 
1486   Input Parameters:
1487 + n        - the number of index sets
1488 . is       - the array of index sets
1489 - is_local - the array of local index sets, can be `NULL`
1490 
1491   Level: advanced
1492 
1493 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1494 @*/
1495 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1496 {
1497   PetscInt i;
1498 
1499   PetscFunctionBegin;
1500   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1501   if (is) {
1502     PetscAssertPointer(is, 2);
1503     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1504     PetscCall(PetscFree(is));
1505   }
1506   if (is_local) {
1507     PetscAssertPointer(is_local, 3);
1508     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1509     PetscCall(PetscFree(is_local));
1510   }
1511   PetscFunctionReturn(PETSC_SUCCESS);
1512 }
1513 
/*@C
  PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
  preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

  Not Collective

  Input Parameters:
+ m       - the number of mesh points in the x direction
. n       - the number of mesh points in the y direction
. M       - the number of subdomains in the x direction
. N       - the number of subdomains in the y direction
. dof     - degrees of freedom per node
- overlap - overlap in mesh lines

  Output Parameters:
+ Nsub     - the number of subdomains created
. is       - array of index sets defining overlapping (if overlap > 0) subdomains
- is_local - array of index sets defining non-overlapping subdomains

  Level: advanced

  Note:
  Presently `PCASMCreateSubdomains2D()` is valid only for sequential
  preconditioners.  More general related routines are
  `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.

  Fortran Notes:
  The `IS` must be declared as an array of length long enough to hold `Nsub` entries

.seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMSetOverlap()`
@*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  /* the M x N subdomain tiles are laid out row-major: index i*M + j for tile (row i, column j) */
  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0;
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain; first (n % N) rows of tiles get one extra line */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    /* extend the tile by `overlap` mesh lines in each direction, clamped to the grid */
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      /* gather the natural-ordering (row-major) grid indices of the overlapping rectangle */
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        /* no overlap: local and overlapping subdomains coincide, so share one IS (with an extra reference) */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx (the non-overlapping rectangle is a subset, so it fits) for the local IS */
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* the indices were generated in raster order per rectangle and are already increasing,
     but sort defensively since downstream code expects sorted index sets */
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1601 
1602 /*@C
1603   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1604   only) for the additive Schwarz preconditioner, `PCASM`.
1605 
1606   Not Collective
1607 
1608   Input Parameter:
1609 . pc - the preconditioner context
1610 
1611   Output Parameters:
1612 + n        - if requested, the number of subdomains for this processor (default value = 1)
1613 . is       - if requested, the index sets that define the subdomains for this processor
1614 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1615 
1616   Level: advanced
1617 
1618   Note:
1619   The `IS` numbering is in the parallel, global numbering of the vector.
1620 
1621 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1622           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1623 @*/
1624 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1625 {
1626   PC_ASM   *osm = (PC_ASM *)pc->data;
1627   PetscBool match;
1628 
1629   PetscFunctionBegin;
1630   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1631   if (n) PetscAssertPointer(n, 2);
1632   if (is) PetscAssertPointer(is, 3);
1633   if (is_local) PetscAssertPointer(is_local, 4);
1634   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1635   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1636   if (n) *n = osm->n_local_true;
1637   if (is) *is = osm->is;
1638   if (is_local) *is_local = osm->is_local;
1639   PetscFunctionReturn(PETSC_SUCCESS);
1640 }
1641 
1642 /*@C
1643   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1644   only) for the additive Schwarz preconditioner, `PCASM`.
1645 
1646   Not Collective
1647 
1648   Input Parameter:
1649 . pc - the preconditioner context
1650 
1651   Output Parameters:
1652 + n   - if requested, the number of matrices for this processor (default value = 1)
1653 - mat - if requested, the matrices
1654 
1655   Level: advanced
1656 
1657   Notes:
1658   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`)
1659 
1660   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1661 
1662 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1663           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1664 @*/
1665 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1666 {
1667   PC_ASM   *osm;
1668   PetscBool match;
1669 
1670   PetscFunctionBegin;
1671   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1672   if (n) PetscAssertPointer(n, 2);
1673   if (mat) PetscAssertPointer(mat, 3);
1674   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1675   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1676   if (!match) {
1677     if (n) *n = 0;
1678     if (mat) *mat = NULL;
1679   } else {
1680     osm = (PC_ASM *)pc->data;
1681     if (n) *n = osm->n_local_true;
1682     if (mat) *mat = osm->pmat;
1683   }
1684   PetscFunctionReturn(PETSC_SUCCESS);
1685 }
1686 
1687 /*@
1688   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1689 
1690   Logically Collective
1691 
1692   Input Parameters:
1693 + pc  - the preconditioner
1694 - flg - boolean indicating whether to use subdomains defined by the `DM`
1695 
1696   Options Database Key:
1697 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1698 
1699   Level: intermediate
1700 
1701   Note:
1702   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1703   so setting either of the first two effectively turns the latter off.
1704 
1705 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1706           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1707 @*/
1708 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1709 {
1710   PC_ASM   *osm = (PC_ASM *)pc->data;
1711   PetscBool match;
1712 
1713   PetscFunctionBegin;
1714   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1715   PetscValidLogicalCollectiveBool(pc, flg, 2);
1716   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1717   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1718   if (match) osm->dm_subdomains = flg;
1719   PetscFunctionReturn(PETSC_SUCCESS);
1720 }
1721 
1722 /*@
1723   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1724 
1725   Not Collective
1726 
1727   Input Parameter:
1728 . pc - the preconditioner
1729 
1730   Output Parameter:
1731 . flg - boolean indicating whether to use subdomains defined by the `DM`
1732 
1733   Level: intermediate
1734 
1735 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1736           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1737 @*/
1738 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1739 {
1740   PC_ASM   *osm = (PC_ASM *)pc->data;
1741   PetscBool match;
1742 
1743   PetscFunctionBegin;
1744   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1745   PetscAssertPointer(flg, 2);
1746   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1747   if (match) *flg = osm->dm_subdomains;
1748   else *flg = PETSC_FALSE;
1749   PetscFunctionReturn(PETSC_SUCCESS);
1750 }
1751 
1752 /*@C
1753   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1754 
1755   Not Collective
1756 
1757   Input Parameter:
1758 . pc - the `PC`
1759 
1760   Output Parameter:
1761 . sub_mat_type - name of matrix type
1762 
1763   Level: advanced
1764 
1765 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1766 @*/
1767 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1768 {
1769   PetscFunctionBegin;
1770   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1771   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1772   PetscFunctionReturn(PETSC_SUCCESS);
1773 }
1774 
1775 /*@C
1776   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1777 
1778   Collective
1779 
1780   Input Parameters:
1781 + pc           - the `PC` object
1782 - sub_mat_type - the `MatType`
1783 
1784   Options Database Key:
1785 . -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1786    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1787 
1788   Note:
1789   See `MatType` for available types
1790 
1791   Level: advanced
1792 
1793 .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1794 @*/
1795 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1796 {
1797   PetscFunctionBegin;
1798   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1799   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1800   PetscFunctionReturn(PETSC_SUCCESS);
1801 }
1802