1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
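       for example, with 3 processors owning 2, 1, and 3 true subdomains: n = 6 and n_local = 3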
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         iascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (iascii) {
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(viewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(viewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
69     }
70   } else if (isstring) {
71     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
72     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
73     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
74     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
75   }
76   PetscFunctionReturn(PETSC_SUCCESS);
77 }
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 static PetscErrorCode PCSetUp_ASM(PC pc)
154 {
155   PC_ASM     *osm = (PC_ASM *)pc->data;
156   PetscBool   flg;
157   PetscInt    i, m, m_local;
158   MatReuse    scall = MAT_REUSE_MATRIX;
159   IS          isl;
160   KSP         ksp;
161   PC          subpc;
162   const char *prefix, *pprefix;
163   Vec         vec;
164   DM         *domain_dm = NULL;
165 
166   PetscFunctionBegin;
167   if (!pc->setupcalled) {
168     PetscInt m;
169 
170     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
171     if (osm->n_local_true == PETSC_DECIDE) {
172       /* no subdomains given */
173       /* try pc->dm first, if allowed */
174       if (osm->dm_subdomains && pc->dm) {
175         PetscInt num_domains, d;
176         char   **domain_names;
177         IS      *inner_domain_is, *outer_domain_is;
178         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
179         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
180                               A future improvement of this code might allow one to use
181                               DM-defined subdomains and also increase the overlap,
182                               but that is not currently supported */
183         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
184         for (d = 0; d < num_domains; ++d) {
185           if (domain_names) PetscCall(PetscFree(domain_names[d]));
186           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
187           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
188         }
189         PetscCall(PetscFree(domain_names));
190         PetscCall(PetscFree(inner_domain_is));
191         PetscCall(PetscFree(outer_domain_is));
192       }
193       if (osm->n_local_true == PETSC_DECIDE) {
194         /* still no subdomains; use one subdomain per processor */
195         osm->n_local_true = 1;
196       }
197     }
198     { /* determine the global and max number of subdomains */
199       struct {
200         PetscInt max, sum;
201       } inwork, outwork;
202       PetscMPIInt size;
203 
204       inwork.max = osm->n_local_true;
205       inwork.sum = osm->n_local_true;
206       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
207       osm->n_local = outwork.max;
208       osm->n       = outwork.sum;
209 
210       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
211       if (outwork.max == 1 && outwork.sum == size) {
212         /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
213         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
214       }
215     }
216     if (!osm->is) { /* create the index sets */
217       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
218     }
219     if (osm->n_local_true > 1 && !osm->is_local) {
220       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
221       for (i = 0; i < osm->n_local_true; i++) {
222         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
223           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
224           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
225         } else {
226           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
227           osm->is_local[i] = osm->is[i];
228         }
229       }
230     }
231     PetscCall(PCGetOptionsPrefix(pc, &prefix));
232     if (osm->overlap > 0) {
233       /* Extend the "overlapping" regions by a number of steps */
234       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
235     }
236     if (osm->sort_indices) {
237       for (i = 0; i < osm->n_local_true; i++) {
238         PetscCall(ISSort(osm->is[i]));
239         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
240       }
241     }
242     flg = PETSC_FALSE;
243     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
244     if (flg) PetscCall(PCASMPrintSubdomains(pc));
245     if (!osm->ksp) {
246       /* Create the local solvers */
247       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
248       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
249       for (i = 0; i < osm->n_local_true; i++) {
250         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
251         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
252         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
253         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
254         PetscCall(KSPSetType(ksp, KSPPREONLY));
255         PetscCall(KSPGetPC(ksp, &subpc));
256         PetscCall(PCGetOptionsPrefix(pc, &prefix));
257         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
258         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
259         if (domain_dm) {
260           PetscCall(KSPSetDM(ksp, domain_dm[i]));
261           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
262           PetscCall(DMDestroy(&domain_dm[i]));
263         }
264         osm->ksp[i] = ksp;
265       }
266       if (domain_dm) PetscCall(PetscFree(domain_dm));
267     }
268 
269     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
270     PetscCall(ISSortRemoveDups(osm->lis));
271     PetscCall(ISGetLocalSize(osm->lis, &m));
272 
273     scall = MAT_INITIAL_MATRIX;
274   } else {
275     /*
276        Destroy the blocks from the previous iteration
277     */
278     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
279       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
280       scall = MAT_INITIAL_MATRIX;
281     }
282   }
283 
284   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
285   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
286     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
287     scall = MAT_INITIAL_MATRIX;
288   }
289 
290   /*
291      Extract out the submatrices
292   */
293   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
294   if (scall == MAT_INITIAL_MATRIX) {
295     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
296     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
297   }
298 
299   /* Convert the types of the submatrices (if need be) */
300   if (osm->sub_mat_type) {
301     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
302   }
303 
304   if (!pc->setupcalled) {
305     VecType vtype;
306 
307     /* Create the local work vectors (from the local matrices) and scatter contexts */
308     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
309 
310     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
311     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
312     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
313     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
314     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
315 
316     PetscCall(ISGetLocalSize(osm->lis, &m));
317     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
318     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
319     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
320     PetscCall(VecSetSizes(osm->lx, m, m));
321     PetscCall(VecSetType(osm->lx, vtype));
322     PetscCall(VecDuplicate(osm->lx, &osm->ly));
323     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
324     PetscCall(ISDestroy(&isl));
325 
326     for (i = 0; i < osm->n_local_true; ++i) {
327       ISLocalToGlobalMapping ltog;
328       IS                     isll;
329       const PetscInt        *idx_is;
330       PetscInt              *idx_lis, nout;
331 
332       PetscCall(ISGetLocalSize(osm->is[i], &m));
333       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
334       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
335 
336       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
337       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
338       PetscCall(ISGetLocalSize(osm->is[i], &m));
339       PetscCall(ISGetIndices(osm->is[i], &idx_is));
340       PetscCall(PetscMalloc1(m, &idx_lis));
341       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
342       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
343       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
344       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
345       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
346       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
347       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
348       PetscCall(ISDestroy(&isll));
349       PetscCall(ISDestroy(&isl));
350       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
351         ISLocalToGlobalMapping ltog;
352         IS                     isll, isll_local;
353         const PetscInt        *idx_local;
354         PetscInt              *idx1, *idx2, nout;
355 
356         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
357         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
358 
359         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
360         PetscCall(PetscMalloc1(m_local, &idx1));
361         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
362         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
363         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
364         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
365 
366         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
367         PetscCall(PetscMalloc1(m_local, &idx2));
368         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
369         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
370         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
371         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
372 
373         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
374         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
375 
376         PetscCall(ISDestroy(&isll));
377         PetscCall(ISDestroy(&isll_local));
378       }
379     }
380     PetscCall(VecDestroy(&vec));
381   }
382 
383   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
384     IS      *cis;
385     PetscInt c;
386 
387     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
388     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
389     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
390     PetscCall(PetscFree(cis));
391   }
392 
393   /* Return control to the user so that the submatrices can be modified (e.g., to apply
394      different boundary conditions for the submatrices than for the global problem) */
395   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
396 
397   /*
398      Loop over subdomains putting them into local ksp
399   */
400   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
401   for (i = 0; i < osm->n_local_true; i++) {
402     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
403     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
404     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
405   }
406   PetscFunctionReturn(PETSC_SUCCESS);
407 }
408 
409 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
410 {
411   PC_ASM            *osm = (PC_ASM *)pc->data;
412   PetscInt           i;
413   KSPConvergedReason reason;
414 
415   PetscFunctionBegin;
416   for (i = 0; i < osm->n_local_true; i++) {
417     PetscCall(KSPSetUp(osm->ksp[i]));
418     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
419     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
420   }
421   PetscFunctionReturn(PETSC_SUCCESS);
422 }
423 
424 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
425 {
426   PC_ASM     *osm = (PC_ASM *)pc->data;
427   PetscInt    i, n_local_true = osm->n_local_true;
428   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
429 
430   PetscFunctionBegin;
431   /*
432      support for limiting the restriction or interpolation to only local
433      subdomain values (leaving the other values 0).
434   */
435   if (!(osm->type & PC_ASM_RESTRICT)) {
436     forward = SCATTER_FORWARD_LOCAL;
437     /* have to zero the work RHS since scatter may leave some slots empty */
438     PetscCall(VecSet(osm->lx, 0.0));
439   }
440   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
441 
442   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
443   /* zero the global and the local solutions */
444   PetscCall(VecSet(y, 0.0));
445   PetscCall(VecSet(osm->ly, 0.0));
446 
447   /* copy the global RHS to local RHS including the ghost nodes */
448   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
449   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
450 
451   /* restrict local RHS to the overlapping 0-block RHS */
452   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
453   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
454 
455   /* do the local solves */
456   for (i = 0; i < n_local_true; ++i) {
457     /* solve the overlapping i-block */
458     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
459     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
460     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
461     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
462 
463     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
464       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
465       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
466     } else { /* interpolate the overlapping i-block solution to the local solution */
467       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
468       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
469     }
470 
471     if (i < n_local_true - 1) {
472       /* restrict local RHS to the overlapping (i+1)-block RHS */
473       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
474       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
475 
476       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
477         /* update the overlapping (i+1)-block RHS using the current local solution */
478         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
479         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
480       }
481     }
482   }
483   /* add the local solution to the global solution including the ghost nodes */
484   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
485   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
486   PetscFunctionReturn(PETSC_SUCCESS);
487 }
488 
489 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
490 {
491   PC_ASM     *osm = (PC_ASM *)pc->data;
492   Mat         Z, W;
493   Vec         x;
494   PetscInt    i, m, N;
495   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
496 
497   PetscFunctionBegin;
498   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
499   /*
500      support for limiting the restriction or interpolation to only local
501      subdomain values (leaving the other values 0).
502   */
503   if (!(osm->type & PC_ASM_RESTRICT)) {
504     forward = SCATTER_FORWARD_LOCAL;
505     /* have to zero the work RHS since scatter may leave some slots empty */
506     PetscCall(VecSet(osm->lx, 0.0));
507   }
508   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
509   PetscCall(VecGetLocalSize(osm->x[0], &m));
510   PetscCall(MatGetSize(X, NULL, &N));
511   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
512 
513   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
514   /* zero the global and the local solutions */
515   PetscCall(MatZeroEntries(Y));
516   PetscCall(VecSet(osm->ly, 0.0));
517 
518   for (i = 0; i < N; ++i) {
519     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
520     /* copy the global RHS to local RHS including the ghost nodes */
521     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
522     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
523     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
524 
525     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
526     /* restrict local RHS to the overlapping 0-block RHS */
527     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
528     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
529     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
530   }
531   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
532   /* solve the overlapping 0-block */
533   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
534   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
535   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
536   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
537   PetscCall(MatDestroy(&Z));
538 
539   for (i = 0; i < N; ++i) {
540     PetscCall(VecSet(osm->ly, 0.0));
541     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
542     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
543       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
544       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
545     } else { /* interpolate the overlapping 0-block solution to the local solution */
546       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
547       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
548     }
549     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
550 
551     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
552     /* add the local solution to the global solution including the ghost nodes */
553     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
554     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
555     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
556   }
557   PetscCall(MatDestroy(&W));
558   PetscFunctionReturn(PETSC_SUCCESS);
559 }
560 
561 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
562 {
563   PC_ASM     *osm = (PC_ASM *)pc->data;
564   PetscInt    i, n_local_true = osm->n_local_true;
565   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
566 
567   PetscFunctionBegin;
568   /*
569      Support for limiting the restriction or interpolation to only local
570      subdomain values (leaving the other values 0).
571 
572      Note: these are reversed from those in PCApply_ASM() because we are applying the
573      transpose of the three terms
574   */
575 
576   if (!(osm->type & PC_ASM_INTERPOLATE)) {
577     forward = SCATTER_FORWARD_LOCAL;
578     /* have to zero the work RHS since scatter may leave some slots empty */
579     PetscCall(VecSet(osm->lx, 0.0));
580   }
581   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
582 
583   /* zero the global and the local solutions */
584   PetscCall(VecSet(y, 0.0));
585   PetscCall(VecSet(osm->ly, 0.0));
586 
587   /* Copy the global RHS to local RHS including the ghost nodes */
588   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
589   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
590 
591   /* Restrict local RHS to the overlapping 0-block RHS */
592   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
593   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
594 
595   /* do the local solves */
596   for (i = 0; i < n_local_true; ++i) {
597     /* solve the overlapping i-block */
598     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
599     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
600     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
601     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
602 
603     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
604       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
605       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
606     } else { /* interpolate the overlapping i-block solution to the local solution */
607       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
608       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
609     }
610 
611     if (i < n_local_true - 1) {
612       /* Restrict local RHS to the overlapping (i+1)-block RHS */
613       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
614       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
615     }
616   }
617   /* Add the local solution to the global solution including the ghost nodes */
618   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
619   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
620   PetscFunctionReturn(PETSC_SUCCESS);
621 }
622 
623 static PetscErrorCode PCReset_ASM(PC pc)
624 {
625   PC_ASM  *osm = (PC_ASM *)pc->data;
626   PetscInt i;
627 
628   PetscFunctionBegin;
629   if (osm->ksp) {
630     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
631   }
632   if (osm->pmat) {
633     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
634   }
635   if (osm->lrestriction) {
636     PetscCall(VecScatterDestroy(&osm->restriction));
637     for (i = 0; i < osm->n_local_true; i++) {
638       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
639       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
640       PetscCall(VecDestroy(&osm->x[i]));
641       PetscCall(VecDestroy(&osm->y[i]));
642     }
643     PetscCall(PetscFree(osm->lrestriction));
644     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
645     PetscCall(PetscFree(osm->x));
646     PetscCall(PetscFree(osm->y));
647   }
648   PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
649   PetscCall(ISDestroy(&osm->lis));
650   PetscCall(VecDestroy(&osm->lx));
651   PetscCall(VecDestroy(&osm->ly));
652   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
653 
654   PetscCall(PetscFree(osm->sub_mat_type));
655 
656   osm->is       = NULL;
657   osm->is_local = NULL;
658   PetscFunctionReturn(PETSC_SUCCESS);
659 }
660 
661 static PetscErrorCode PCDestroy_ASM(PC pc)
662 {
663   PC_ASM  *osm = (PC_ASM *)pc->data;
664   PetscInt i;
665 
666   PetscFunctionBegin;
667   PetscCall(PCReset_ASM(pc));
668   if (osm->ksp) {
669     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
670     PetscCall(PetscFree(osm->ksp));
671   }
672   PetscCall(PetscFree(pc->data));
673 
674   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
675   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
676   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
677   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
685   PetscFunctionReturn(PETSC_SUCCESS);
686 }
687 
688 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
689 {
690   PC_ASM         *osm = (PC_ASM *)pc->data;
691   PetscInt        blocks, ovl;
692   PetscBool       flg;
693   PCASMType       asmtype;
694   PCCompositeType loctype;
695   char            sub_mat_type[256];
696 
697   PetscFunctionBegin;
698   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
699   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
700   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
701   if (flg) {
702     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
703     osm->dm_subdomains = PETSC_FALSE;
704   }
705   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
706   if (flg) {
707     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
708     osm->dm_subdomains = PETSC_FALSE;
709   }
710   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
711   if (flg) {
712     PetscCall(PCASMSetOverlap(pc, ovl));
713     osm->dm_subdomains = PETSC_FALSE;
714   }
715   flg = PETSC_FALSE;
716   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
717   if (flg) PetscCall(PCASMSetType(pc, asmtype));
718   flg = PETSC_FALSE;
719   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
720   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
721   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
722   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
723   PetscOptionsHeadEnd();
724   PetscFunctionReturn(PETSC_SUCCESS);
725 }
726 
727 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
728 {
729   PC_ASM  *osm = (PC_ASM *)pc->data;
730   PetscInt i;
731 
732   PetscFunctionBegin;
733   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
734   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
735 
736   if (!pc->setupcalled) {
737     if (is) {
738       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
739     }
740     if (is_local) {
741       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
742     }
743     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
744 
745     if (osm->ksp && osm->n_local_true != n) {
746       for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
747       PetscCall(PetscFree(osm->ksp));
748     }
749 
750     osm->n_local_true = n;
751     osm->is           = NULL;
752     osm->is_local     = NULL;
753     if (is) {
754       PetscCall(PetscMalloc1(n, &osm->is));
755       for (i = 0; i < n; i++) osm->is[i] = is[i];
756       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
757       osm->overlap = -1;
758     }
759     if (is_local) {
760       PetscCall(PetscMalloc1(n, &osm->is_local));
761       for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
762       if (!is) {
763         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
764         for (i = 0; i < osm->n_local_true; i++) {
765           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
766             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
767             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
768           } else {
769             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
770             osm->is[i] = osm->is_local[i];
771           }
772         }
773       }
774     }
775   }
776   PetscFunctionReturn(PETSC_SUCCESS);
777 }
778 
779 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
780 {
781   PC_ASM     *osm = (PC_ASM *)pc->data;
782   PetscMPIInt rank, size;
783   PetscInt    n;
784 
785   PetscFunctionBegin;
786   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
787   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");
788 
789   /*
790      Split the subdomains equally among all processors
791   */
792   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
793   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
794   n = N / size + ((N % size) > rank);
795   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
796   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
797   if (!pc->setupcalled) {
798     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
799 
800     osm->n_local_true = n;
801     osm->is           = NULL;
802     osm->is_local     = NULL;
803   }
804   PetscFunctionReturn(PETSC_SUCCESS);
805 }
806 
807 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
808 {
809   PC_ASM *osm = (PC_ASM *)pc->data;
810 
811   PetscFunctionBegin;
812   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
813   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
814   if (!pc->setupcalled) osm->overlap = ovl;
815   PetscFunctionReturn(PETSC_SUCCESS);
816 }
817 
818 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
819 {
820   PC_ASM *osm = (PC_ASM *)pc->data;
821 
822   PetscFunctionBegin;
823   osm->type     = type;
824   osm->type_set = PETSC_TRUE;
825   PetscFunctionReturn(PETSC_SUCCESS);
826 }
827 
828 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
829 {
830   PC_ASM *osm = (PC_ASM *)pc->data;
831 
832   PetscFunctionBegin;
833   *type = osm->type;
834   PetscFunctionReturn(PETSC_SUCCESS);
835 }
836 
837 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
838 {
839   PC_ASM *osm = (PC_ASM *)pc->data;
840 
841   PetscFunctionBegin;
842   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
843   osm->loctype = type;
844   PetscFunctionReturn(PETSC_SUCCESS);
845 }
846 
847 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
848 {
849   PC_ASM *osm = (PC_ASM *)pc->data;
850 
851   PetscFunctionBegin;
852   *type = osm->loctype;
853   PetscFunctionReturn(PETSC_SUCCESS);
854 }
855 
856 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
857 {
858   PC_ASM *osm = (PC_ASM *)pc->data;
859 
860   PetscFunctionBegin;
861   osm->sort_indices = doSort;
862   PetscFunctionReturn(PETSC_SUCCESS);
863 }
864 
865 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
866 {
867   PC_ASM *osm = (PC_ASM *)pc->data;
868 
869   PetscFunctionBegin;
870   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
871 
872   if (n_local) *n_local = osm->n_local_true;
873   if (first_local) {
874     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
875     *first_local -= osm->n_local_true;
876   }
877   if (ksp) *ksp = osm->ksp;
878   PetscFunctionReturn(PETSC_SUCCESS);
879 }
880 
881 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
882 {
883   PC_ASM *osm = (PC_ASM *)pc->data;
884 
885   PetscFunctionBegin;
886   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
887   PetscAssertPointer(sub_mat_type, 2);
888   *sub_mat_type = osm->sub_mat_type;
889   PetscFunctionReturn(PETSC_SUCCESS);
890 }
891 
892 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
893 {
894   PC_ASM *osm = (PC_ASM *)pc->data;
895 
896   PetscFunctionBegin;
897   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
898   PetscCall(PetscFree(osm->sub_mat_type));
899   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
900   PetscFunctionReturn(PETSC_SUCCESS);
901 }
902 
903 /*@C
904   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
905 
906   Collective
907 
908   Input Parameters:
909 + pc       - the preconditioner context
910 . n        - the number of subdomains for this processor (default value = 1)
911 . is       - the index sets that define the subdomains for this processor
912          (or `NULL` for PETSc to determine subdomains)
913 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
914          (or `NULL` to not provide these)
915 
916   Options Database Key:
917 . -pc_asm_local_blocks <blks> - Sets number of local blocks
918 
919   Level: advanced
920 
921   Notes:
922   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
923 
924   By default the `PCASM` preconditioner uses 1 block per processor.
925 
926   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
927 
928   If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
929   back to form the global solution (this is the standard restricted additive Schwarz method)
930 
931   If is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
932   no code to handle that case.
933 
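  Example Usage:
  A minimal sketch setting two user-built subdomains on this rank; the counts n0, n1 and index arrays idx0, idx1 are illustrative placeholders holding global row indices of pc's matrix:
.vb
  IS isub[2];

  ISCreateGeneral(PETSC_COMM_SELF, n0, idx0, PETSC_COPY_VALUES, &isub[0]); /* first (already overlapping) subdomain */
  ISCreateGeneral(PETSC_COMM_SELF, n1, idx1, PETSC_COPY_VALUES, &isub[1]); /* second subdomain */
  PCASMSetLocalSubdomains(pc, 2, isub, NULL); /* since isub[] is given, PCASM will not grow the overlap further */
  ISDestroy(&isub[0]); /* the PC keeps its own references */
  ISDestroy(&isub[1]);
.ve
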
934 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
935           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
936 @*/
937 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
938 {
939   PetscFunctionBegin;
940   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
941   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
942   PetscFunctionReturn(PETSC_SUCCESS);
943 }
944 
945 /*@C
946   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
947   additive Schwarz preconditioner, `PCASM`.
948 
949   Collective, all MPI ranks must pass in the same array of `IS`
950 
951   Input Parameters:
952 + pc       - the preconditioner context
953 . N        - the number of subdomains for all processors
954 . is       - the index sets that define the subdomains for all processors
955          (or `NULL` to ask PETSc to determine the subdomains)
956 - is_local - the index sets that define the local part of the subdomains for this processor
957          (or `NULL` to not provide this information)
958 
959   Options Database Key:
960 . -pc_asm_blocks <blks> - Sets total blocks
961 
962   Level: advanced
963 
964   Notes:
965   Currently you cannot use this to set the actual subdomains with the argument is or is_local.
966 
967   By default the `PCASM` preconditioner uses 1 block per processor.
968 
969   These index sets cannot be destroyed until after completion of the
970   linear solves for which the `PCASM` preconditioner is being used.
971 
972   Use `PCASMSetLocalSubdomains()` to set local subdomains.
973 
974   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
975 
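  Example Usage:
  A minimal sketch requesting eight blocks in total and letting PETSc construct them (the count 8 is arbitrary):
.vb
  PCASMSetTotalSubdomains(pc, 8, NULL, NULL); /* the 8 blocks are split as evenly as possible across the ranks */
.ve
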
976 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
977           `PCASMCreateSubdomains2D()`, `PCGASM`
978 @*/
979 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
980 {
981   PetscFunctionBegin;
982   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
983   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
984   PetscFunctionReturn(PETSC_SUCCESS);
985 }
986 
987 /*@
988   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
989   additive Schwarz preconditioner, `PCASM`.
990 
991   Logically Collective
992 
993   Input Parameters:
994 + pc  - the preconditioner context
995 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
996 
997   Options Database Key:
998 . -pc_asm_overlap <ovl> - Sets overlap
999 
1000   Level: intermediate
1001 
1002   Notes:
1003   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1004   multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1005   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1006 
1007   The overlap defaults to 1, so if one desires that no additional
1008   overlap be computed beyond what may have been set with a call to
1009   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1010   must be set to be 0.  In particular, if one does not explicitly set
1011   the subdomains in an application code, then all overlap would be computed
1012   internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1013   variant that is equivalent to the block Jacobi preconditioner.
1014 
1015   The default algorithm used by PETSc to increase overlap is fast but not scalable;
1016   use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1017 
1018   One can define initial index sets with any overlap via
1019   `PCASMSetLocalSubdomains()`; the routine
1020   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1021   if desired.
1022 
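  Example Usage:
  A sketch that disables the automatically computed overlap (assuming ksp is the outer KSP); with the default of one subdomain per rank this reduces `PCASM` to a block Jacobi method:
.vb
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCASM);
  PCASMSetOverlap(pc, 0); /* keep only the subdomains themselves, no extra layers of overlap */
.ve
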
1023 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1024           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1025 @*/
1026 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1027 {
1028   PetscFunctionBegin;
1029   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1030   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1031   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1032   PetscFunctionReturn(PETSC_SUCCESS);
1033 }
1034 
1035 /*@
1036   PCASMSetType - Sets the type of restriction and interpolation used
1037   for local problems in the additive Schwarz method, `PCASM`.
1038 
1039   Logically Collective
1040 
1041   Input Parameters:
1042 + pc   - the preconditioner context
1043 - type - variant of `PCASM`, one of
1044 .vb
1045       PC_ASM_BASIC       - full interpolation and restriction
1046       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1047       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1048       PC_ASM_NONE        - local processor restriction and interpolation
1049 .ve
1050 
1051   Options Database Key:
1052 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1053 
1054   Level: intermediate
1055 
1056   Note:
1057   If the is_local arguments are passed to `PCASMSetLocalSubdomains()`, then they are used when `PC_ASM_RESTRICT` has been selected
1058   to limit the local processor interpolation
1059 
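  Example Usage:
  A one-line sketch selecting the basic (non-restricted) variant, equivalent to the command line option -pc_asm_type basic:
.vb
  PCASMSetType(pc, PC_ASM_BASIC);
.ve
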
1060 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1061           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1062 @*/
1063 PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1064 {
1065   PetscFunctionBegin;
1066   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1067   PetscValidLogicalCollectiveEnum(pc, type, 2);
1068   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1069   PetscFunctionReturn(PETSC_SUCCESS);
1070 }
1071 
1072 /*@
1073   PCASMGetType - Gets the type of restriction and interpolation used
1074   for local problems in the additive Schwarz method, `PCASM`.
1075 
1076   Logically Collective
1077 
1078   Input Parameter:
1079 . pc - the preconditioner context
1080 
1081   Output Parameter:
1082 . type - variant of `PCASM`, one of
1083 .vb
1084       PC_ASM_BASIC       - full interpolation and restriction
1085       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1086       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1087       PC_ASM_NONE        - local processor restriction and interpolation
1088 .ve
1089 
1090   Options Database Key:
1091 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1092 
1093   Level: intermediate
1094 
1095 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1096           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1097 @*/
1098 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1099 {
1100   PetscFunctionBegin;
1101   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1102   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1103   PetscFunctionReturn(PETSC_SUCCESS);
1104 }
1105 
1106 /*@
1107   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1108 
1109   Logically Collective
1110 
1111   Input Parameters:
1112 + pc   - the preconditioner context
1113 - type - type of composition, one of
1114 .vb
1115   PC_COMPOSITE_ADDITIVE       - local additive combination
1116   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1117 .ve
1118 
1119   Options Database Key:
1120 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1121 
1122   Level: intermediate
1123 
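  Example Usage:
  A one-line sketch selecting local multiplicative composition, equivalent to the command line option -pc_asm_local_type multiplicative:
.vb
  PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE);
.ve
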
1124 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1125 @*/
1126 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1127 {
1128   PetscFunctionBegin;
1129   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1130   PetscValidLogicalCollectiveEnum(pc, type, 2);
1131   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1132   PetscFunctionReturn(PETSC_SUCCESS);
1133 }
1134 
1135 /*@
1136   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1137 
1138   Logically Collective
1139 
1140   Input Parameter:
1141 . pc - the preconditioner context
1142 
1143   Output Parameter:
1144 . type - type of composition, one of
1145 .vb
1146   PC_COMPOSITE_ADDITIVE       - local additive combination
1147   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1148 .ve
1149 
1150   Options Database Key:
1151 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1152 
1153   Level: intermediate
1154 
1155 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1156 @*/
1157 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1158 {
1159   PetscFunctionBegin;
1160   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1161   PetscAssertPointer(type, 2);
1162   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1163   PetscFunctionReturn(PETSC_SUCCESS);
1164 }
1165 
1166 /*@
1167   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1168 
1169   Logically Collective
1170 
1171   Input Parameters:
1172 + pc     - the preconditioner context
1173 - doSort - sort the subdomain indices
1174 
1175   Level: intermediate
1176 
1177 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1178           `PCASMCreateSubdomains2D()`
1179 @*/
1180 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1181 {
1182   PetscFunctionBegin;
1183   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1184   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1185   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1186   PetscFunctionReturn(PETSC_SUCCESS);
1187 }
1188 
1189 /*@C
1190   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1191   this processor.
1192 
1193   Collective iff first_local is requested
1194 
1195   Input Parameter:
1196 . pc - the preconditioner context
1197 
1198   Output Parameters:
1199 + n_local     - the number of blocks on this processor or `NULL`
1200 . first_local - the global number of the first block on this processor or `NULL`,
1201                  all processors must request or all must pass `NULL`
1202 - ksp         - the array of `KSP` contexts
1203 
1204   Level: advanced
1205 
1206   Notes:
1207   The array of `KSP` contexts returned by `PCASMGetSubKSP()` must not be freed by the caller.
1208 
1209   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1210 
1211   Fortran Notes:
1212   The output argument `ksp` must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1213 
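  Example Usage:
  A sketch of configuring each local block solver individually; it assumes `pc` is a `PCASM` and `KSPSetUp()` has already been called on the outer solver:
.vb
  PetscInt n_local, first_local, i;
  KSP     *subksp;
  PC       subpc;

  PetscCall(PCASMGetSubKSP(pc, &n_local, &first_local, &subksp));
  for (i = 0; i < n_local; i++) {
    PetscCall(KSPGetPC(subksp[i], &subpc));
    PetscCall(PCSetType(subpc, PCILU));
  }
.ve
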
1214 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1215           `PCASMCreateSubdomains2D()`
1216 @*/
1217 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1218 {
1219   PetscFunctionBegin;
1220   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1221   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1222   PetscFunctionReturn(PETSC_SUCCESS);
1223 }
1224 
1225 /*MC
1226    PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1227            its own `KSP` object.
1228 
1229    Options Database Keys:
1230 +  -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank.
1231 .  -pc_asm_overlap <ovl> - Sets overlap
1232 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1233 -  -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1234 
1235    Level: beginner
1236 
1237    Notes:
1238    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1239    will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1240    -pc_asm_type basic to get the same convergence behavior.
1241 
1242    Each processor can have one or more blocks, but a block cannot be shared by more
1243    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1244 
1245    To set options on the solvers for each block, append -sub_ to all the `KSP` and `PC`
1246    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1247 
1248    To set the options on the solvers separately for each block, call `PCASMGetSubKSP()`
1249    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`).
1250 
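   Example Usage:
   A command-line sketch combining the keys above; the executable name and the solver choices are only illustrative:
.vb
   mpiexec -n 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve
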
1251     References:
1252 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1253      Courant Institute, New York University Technical report
1254 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1255     Cambridge University Press.
1256 
1257 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1258           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1259           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`
1260 M*/
1261 
1262 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1263 {
1264   PC_ASM *osm;
1265 
1266   PetscFunctionBegin;
1267   PetscCall(PetscNew(&osm));
1268 
1269   osm->n             = PETSC_DECIDE;
1270   osm->n_local       = 0;
1271   osm->n_local_true  = PETSC_DECIDE;
1272   osm->overlap       = 1;
1273   osm->ksp           = NULL;
1274   osm->restriction   = NULL;
1275   osm->lprolongation = NULL;
1276   osm->lrestriction  = NULL;
1277   osm->x             = NULL;
1278   osm->y             = NULL;
1279   osm->is            = NULL;
1280   osm->is_local      = NULL;
1281   osm->mat           = NULL;
1282   osm->pmat          = NULL;
1283   osm->type          = PC_ASM_RESTRICT;
1284   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1285   osm->sort_indices  = PETSC_TRUE;
1286   osm->dm_subdomains = PETSC_FALSE;
1287   osm->sub_mat_type  = NULL;
1288 
1289   pc->data                 = (void *)osm;
1290   pc->ops->apply           = PCApply_ASM;
1291   pc->ops->matapply        = PCMatApply_ASM;
1292   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1293   pc->ops->setup           = PCSetUp_ASM;
1294   pc->ops->reset           = PCReset_ASM;
1295   pc->ops->destroy         = PCDestroy_ASM;
1296   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1297   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1298   pc->ops->view            = PCView_ASM;
1299   pc->ops->applyrichardson = NULL;
1300 
1301   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1302   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1303   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1304   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1305   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1306   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1307   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1310   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1311   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1312   PetscFunctionReturn(PETSC_SUCCESS);
1313 }
1314 
1315 /*@C
1316   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1317   preconditioner, `PCASM`,  for any problem on a general grid.
1318 
1319   Collective
1320 
1321   Input Parameters:
1322 + A - The global matrix operator
1323 - n - the number of local blocks
1324 
1325   Output Parameter:
1326 . outis - the array of index sets defining the subdomains
1327 
1328   Level: advanced
1329 
1330   Note:
1331   This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
1332   from these if you use `PCASMSetLocalSubdomains()`.
1333 
1334   Fortran Notes:
1335   You must provide the array `outis[]`, already allocated with length `n`.
1336 
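  Example Usage:
  A sketch of creating four nonoverlapping blocks on each process and handing them to the preconditioner, which then adds the overlap itself; it assumes `pc` is a `PCASM` and `A` is the matrix it will be built from:
.vb
  PetscInt n = 4;
  IS      *is;

  PetscCall(PCASMCreateSubdomains(A, n, &is));
  PetscCall(PCASMSetLocalSubdomains(pc, n, is, NULL));
  PetscCall(PCASMDestroySubdomains(n, is, NULL));
.ve
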
1337 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1338 @*/
1339 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1340 {
1341   MatPartitioning mpart;
1342   const char     *prefix;
1343   PetscInt        i, j, rstart, rend, bs;
1344   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1345   Mat             Ad = NULL, adj;
1346   IS              ispart, isnumb, *is;
1347 
1348   PetscFunctionBegin;
1349   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1350   PetscAssertPointer(outis, 3);
1351   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);
1352 
1353   /* Get prefix, row distribution, and block size */
1354   PetscCall(MatGetOptionsPrefix(A, &prefix));
1355   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1356   PetscCall(MatGetBlockSize(A, &bs));
1357   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1358 
1359   /* Get diagonal block from matrix if possible */
1360   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1361   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1362   if (Ad) {
1363     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1364     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1365   }
1366   if (Ad && n > 1) {
1367     PetscBool match, done;
1368     /* Try to setup a good matrix partitioning if available */
1369     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1370     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1371     PetscCall(MatPartitioningSetFromOptions(mpart));
1372     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1373     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1374     if (!match) { /* assume a "good" partitioner is available */
1375       PetscInt        na;
1376       const PetscInt *ia, *ja;
1377       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1378       if (done) {
1379         /* Build adjacency matrix by hand. Unfortunately a call to
1380            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1381            remove the block-aij structure and we cannot expect
1382            MatPartitioning to split vertices as we need */
1383         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1384         const PetscInt *row;
1385         nnz = 0;
1386         for (i = 0; i < na; i++) { /* count number of nonzeros */
1387           len = ia[i + 1] - ia[i];
1388           row = ja + ia[i];
1389           for (j = 0; j < len; j++) {
1390             if (row[j] == i) { /* don't count diagonal */
1391               len--;
1392               break;
1393             }
1394           }
1395           nnz += len;
1396         }
1397         PetscCall(PetscMalloc1(na + 1, &iia));
1398         PetscCall(PetscMalloc1(nnz, &jja));
1399         nnz    = 0;
1400         iia[0] = 0;
1401         for (i = 0; i < na; i++) { /* fill adjacency */
1402           cnt = 0;
1403           len = ia[i + 1] - ia[i];
1404           row = ja + ia[i];
1405           for (j = 0; j < len; j++) {
1406             if (row[j] != i) { /* if not diagonal */
1407               jja[nnz + cnt++] = row[j];
1408             }
1409           }
1410           nnz += cnt;
1411           iia[i + 1] = nnz;
1412         }
1413         /* Partitioning of the adjacency matrix */
1414         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1415         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1416         PetscCall(MatPartitioningSetNParts(mpart, n));
1417         PetscCall(MatPartitioningApply(mpart, &ispart));
1418         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1419         PetscCall(MatDestroy(&adj));
1420         foundpart = PETSC_TRUE;
1421       }
1422       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1423     }
1424     PetscCall(MatPartitioningDestroy(&mpart));
1425   }
1426 
1427   PetscCall(PetscMalloc1(n, &is));
1428   *outis = is;
1429 
1430   if (!foundpart) {
1431     /* Partitioning by contiguous chunks of rows */
1432 
1433     PetscInt mbs   = (rend - rstart) / bs;
1434     PetscInt start = rstart;
1435     for (i = 0; i < n; i++) {
1436       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1437       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1438       start += count;
1439     }
1440 
1441   } else {
1442     /* Partitioning by adjacency of diagonal block  */
1443 
1444     const PetscInt *numbering;
1445     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1446     /* Get node count in each partition */
1447     PetscCall(PetscMalloc1(n, &count));
1448     PetscCall(ISPartitioningCount(ispart, n, count));
1449     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1450       for (i = 0; i < n; i++) count[i] *= bs;
1451     }
1452     /* Build indices from node numbering */
1453     PetscCall(ISGetLocalSize(isnumb, &nidx));
1454     PetscCall(PetscMalloc1(nidx, &indices));
1455     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1456     PetscCall(ISGetIndices(isnumb, &numbering));
1457     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1458     PetscCall(ISRestoreIndices(isnumb, &numbering));
1459     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1460       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1461       for (i = 0; i < nidx; i++) {
1462         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1463       }
1464       PetscCall(PetscFree(indices));
1465       nidx *= bs;
1466       indices = newidx;
1467     }
1468     /* Shift to get global indices */
1469     for (i = 0; i < nidx; i++) indices[i] += rstart;
1470 
1471     /* Build the index sets for each block */
1472     for (i = 0; i < n; i++) {
1473       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1474       PetscCall(ISSort(is[i]));
1475       start += count[i];
1476     }
1477 
1478     PetscCall(PetscFree(count));
1479     PetscCall(PetscFree(indices));
1480     PetscCall(ISDestroy(&isnumb));
1481     PetscCall(ISDestroy(&ispart));
1482   }
1483   PetscFunctionReturn(PETSC_SUCCESS);
1484 }
1485 
1486 /*@C
1487   PCASMDestroySubdomains - Destroys the index sets created with
1488   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1489 
1490   Collective
1491 
1492   Input Parameters:
1493 + n        - the number of index sets
1494 . is       - the array of index sets
1495 - is_local - the array of local index sets, can be `NULL`
1496 
1497   Level: advanced
1498 
1499 .seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1500 @*/
1501 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1502 {
1503   PetscInt i;
1504 
1505   PetscFunctionBegin;
1506   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1507   if (is) {
1508     PetscAssertPointer(is, 2);
1509     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1510     PetscCall(PetscFree(is));
1511   }
1512   if (is_local) {
1513     PetscAssertPointer(is_local, 3);
1514     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1515     PetscCall(PetscFree(is_local));
1516   }
1517   PetscFunctionReturn(PETSC_SUCCESS);
1518 }
1519 
1520 /*@C
1521   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1522   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1523 
1524   Not Collective
1525 
1526   Input Parameters:
1527 + m       - the number of mesh points in the x direction
1528 . n       - the number of mesh points in the y direction
1529 . M       - the number of subdomains in the x direction
1530 . N       - the number of subdomains in the y direction
1531 . dof     - degrees of freedom per node
1532 - overlap - overlap in mesh lines
1533 
1534   Output Parameters:
1535 + Nsub     - the number of subdomains created
1536 . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1537 - is_local - array of index sets defining non-overlapping subdomains
1538 
1539   Level: advanced
1540 
1541   Note:
1542   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1543   preconditioners.  More general related routines are
1544   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1545 
1546   Fortran Notes:
1547   The `IS` arguments must be declared as arrays long enough to hold `Nsub` entries.
1548 
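  Example Usage:
  A sketch for a sequential 12 x 12 grid with one degree of freedom per node, split into a 2 x 2 array of subdomains with one mesh line of overlap; the grid and subdomain sizes are only illustrative and `pc` is assumed to be a `PCASM`:
.vb
  PetscInt Nsub;
  IS      *is, *is_local;

  PetscCall(PCASMCreateSubdomains2D(12, 12, 2, 2, 1, 1, &Nsub, &is, &is_local));
  PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
  PetscCall(PCASMDestroySubdomains(Nsub, is, is_local));
.ve
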
1549 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1550           `PCASMSetOverlap()`
1551 @*/
1552 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
1553 {
1554   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1555   PetscInt nidx, *idx, loc, ii, jj, count;
1556 
1557   PetscFunctionBegin;
1558   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");
1559 
1560   *Nsub = N * M;
1561   PetscCall(PetscMalloc1(*Nsub, is));
1562   PetscCall(PetscMalloc1(*Nsub, is_local));
1563   ystart    = 0;
1564   loc_outer = 0;
1565   for (i = 0; i < N; i++) {
1566     height = n / N + ((n % N) > i); /* height of subdomain */
1567     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1568     yleft = ystart - overlap;
1569     if (yleft < 0) yleft = 0;
1570     yright = ystart + height + overlap;
1571     if (yright > n) yright = n;
1572     xstart = 0;
1573     for (j = 0; j < M; j++) {
1574       width = m / M + ((m % M) > j); /* width of subdomain */
1575       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1576       xleft = xstart - overlap;
1577       if (xleft < 0) xleft = 0;
1578       xright = xstart + width + overlap;
1579       if (xright > m) xright = m;
1580       nidx = (xright - xleft) * (yright - yleft);
1581       PetscCall(PetscMalloc1(nidx, &idx));
1582       loc = 0;
1583       for (ii = yleft; ii < yright; ii++) {
1584         count = m * ii + xleft;
1585         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1586       }
1587       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1588       if (overlap == 0) {
1589         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));
1590 
1591         (*is_local)[loc_outer] = (*is)[loc_outer];
1592       } else {
1593         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1594           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1595         }
1596         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1597       }
1598       PetscCall(PetscFree(idx));
1599       xstart += width;
1600       loc_outer++;
1601     }
1602     ystart += height;
1603   }
1604   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1605   PetscFunctionReturn(PETSC_SUCCESS);
1606 }
1607 
1608 /*@C
1609   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1610   only) for the additive Schwarz preconditioner, `PCASM`.
1611 
1612   Not Collective
1613 
1614   Input Parameter:
1615 . pc - the preconditioner context
1616 
1617   Output Parameters:
1618 + n        - if requested, the number of subdomains for this processor (default value = 1)
1619 . is       - if requested, the index sets that define the subdomains for this processor
1620 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1621 
1622   Level: advanced
1623 
1624   Note:
1625   The `IS` numbering is in the parallel, global numbering of the vector.
1626 
1627 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1628           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1629 @*/
1630 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1631 {
1632   PC_ASM   *osm = (PC_ASM *)pc->data;
1633   PetscBool match;
1634 
1635   PetscFunctionBegin;
1636   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1637   if (n) PetscAssertPointer(n, 2);
1638   if (is) PetscAssertPointer(is, 3);
1639   if (is_local) PetscAssertPointer(is_local, 4);
1640   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1641   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1642   if (n) *n = osm->n_local_true;
1643   if (is) *is = osm->is;
1644   if (is_local) *is_local = osm->is_local;
1645   PetscFunctionReturn(PETSC_SUCCESS);
1646 }
1647 
1648 /*@C
1649   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1650   only) for the additive Schwarz preconditioner, `PCASM`.
1651 
1652   Not Collective
1653 
1654   Input Parameter:
1655 . pc - the preconditioner context
1656 
1657   Output Parameters:
1658 + n   - if requested, the number of matrices for this processor (default value = 1)
1659 - mat - if requested, the matrices
1660 
1661   Level: advanced
1662 
1663   Notes:
1664   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`.
1665 
1666   Usually one would use `PCSetModifySubMatrices()` to change the submatrices when building the preconditioner.
1667 
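  Example Usage:
  A sketch of viewing every local submatrix once the preconditioner has been set up; it assumes `ksp` uses a `PCASM` and `pc` was obtained from it with `KSPGetPC()`:
.vb
  PetscInt n, i;
  Mat     *mats;

  PetscCall(KSPSetUp(ksp));
  PetscCall(PCASMGetLocalSubmatrices(pc, &n, &mats));
  for (i = 0; i < n; i++) PetscCall(MatView(mats[i], PETSC_VIEWER_STDOUT_SELF));
.ve
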
1668 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1669           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1670 @*/
1671 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1672 {
1673   PC_ASM   *osm;
1674   PetscBool match;
1675 
1676   PetscFunctionBegin;
1677   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1678   if (n) PetscAssertPointer(n, 2);
1679   if (mat) PetscAssertPointer(mat, 3);
1680   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1681   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1682   if (!match) {
1683     if (n) *n = 0;
1684     if (mat) *mat = NULL;
1685   } else {
1686     osm = (PC_ASM *)pc->data;
1687     if (n) *n = osm->n_local_true;
1688     if (mat) *mat = osm->pmat;
1689   }
1690   PetscFunctionReturn(PETSC_SUCCESS);
1691 }
1692 
1693 /*@
1694   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1695 
1696   Logically Collective
1697 
1698   Input Parameters:
1699 + pc  - the preconditioner
1700 - flg - boolean indicating whether to use subdomains defined by the `DM`
1701 
1702   Options Database Key:
1703 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1704 
1705   Level: intermediate
1706 
1707   Note:
1708   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1709   so setting either of the first two effectively turns the latter off.
1710 
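  Example Usage:
  A sketch of letting the `DM` attached to the solver define the subdomains; it assumes `ksp` already has a `DM`, for example set with `KSPSetDM()`:
.vb
  PC pc;

  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCASM));
  PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE));
.ve
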
1711 .seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1712           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1713 @*/
1714 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1715 {
1716   PC_ASM   *osm = (PC_ASM *)pc->data;
1717   PetscBool match;
1718 
1719   PetscFunctionBegin;
1720   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1721   PetscValidLogicalCollectiveBool(pc, flg, 2);
1722   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1723   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1724   if (match) osm->dm_subdomains = flg;
1725   PetscFunctionReturn(PETSC_SUCCESS);
1726 }
1727 
1728 /*@
1729   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1730 
1731   Not Collective
1732 
1733   Input Parameter:
1734 . pc - the preconditioner
1735 
1736   Output Parameter:
1737 . flg - boolean indicating whether to use subdomains defined by the `DM`
1738 
1739   Level: intermediate
1740 
1741 .seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1742           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1743 @*/
1744 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1745 {
1746   PC_ASM   *osm = (PC_ASM *)pc->data;
1747   PetscBool match;
1748 
1749   PetscFunctionBegin;
1750   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1751   PetscAssertPointer(flg, 2);
1752   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1753   if (match) *flg = osm->dm_subdomains;
1754   else *flg = PETSC_FALSE;
1755   PetscFunctionReturn(PETSC_SUCCESS);
1756 }
1757 
1758 /*@C
1759   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1760 
1761   Not Collective
1762 
1763   Input Parameter:
1764 . pc - the `PC`
1765 
1766   Output Parameter:
1767 . sub_mat_type - name of matrix type
1768 
1769   Level: advanced
1770 
1771 .seealso: `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1772 @*/
1773 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1774 {
1775   PetscFunctionBegin;
1776   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1777   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1778   PetscFunctionReturn(PETSC_SUCCESS);
1779 }
1780 
1781 /*@C
1782   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1783 
1784   Collective
1785 
1786   Input Parameters:
1787 + pc           - the `PC` object
1788 - sub_mat_type - the `MatType`
1789 
1790   Options Database Key:
1791 . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1792    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1793 
1794   Note:
1795   See `MatType` for available types
1796 
1797   Level: advanced
1798 
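  Example Usage:
  A sketch of requesting a GPU matrix type for the subsolves; it assumes a ViennaCL-enabled PETSc build and that `pc` is a `PCASM`:
.vb
  PetscCall(PCASMSetSubMatType(pc, MATSEQAIJVIENNACL));
.ve
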
1799 .seealso: `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1800 @*/
1801 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1802 {
1803   PetscFunctionBegin;
1804   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1805   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1806   PetscFunctionReturn(PETSC_SUCCESS);
1807 }
1808