/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local - maximum over all processors of n_local_true
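
  In the basic additive variant the preconditioner is, loosely,

       M^{-1} = sum_i R_i^T A_i^{-1} R_i

  where R_i restricts a global vector to the (overlapping) subdomain i and
  A_i = R_i A R_i^T is the corresponding submatrix; the restricted variants
  replace one of the two scatters with its non-overlapping counterpart. This
  summary is only a sketch; the precise behavior is selected by PCASMType and
  PCCompositeType below.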
*/

#include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
#include <petsc/private/matimpl.h>
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer) {
17   PC_ASM           *osm = (PC_ASM *)pc->data;
18   PetscMPIInt       rank;
19   PetscInt          i, bsz;
20   PetscBool         iascii, isstring;
21   PetscViewer       sviewer;
22   PetscViewerFormat format;
23   const char       *prefix;
24 
25   PetscFunctionBegin;
26   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
28   if (iascii) {
29     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
30     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
31     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
32     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
34     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
35     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
36     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
37     PetscCall(PetscViewerGetFormat(viewer, &format));
38     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
39       if (osm->ksp) {
40         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
41         PetscCall(PCGetOptionsPrefix(pc, &prefix));
42         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
43         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
44         if (rank == 0) {
45           PetscCall(PetscViewerASCIIPushTab(viewer));
46           PetscCall(KSPView(osm->ksp[0], sviewer));
47           PetscCall(PetscViewerASCIIPopTab(viewer));
48         }
49         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
50       }
51     } else {
52       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
53       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
54       PetscCall(PetscViewerFlush(viewer));
55       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
56       PetscCall(PetscViewerASCIIPushTab(viewer));
57       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
58       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
59       for (i = 0; i < osm->n_local_true; i++) {
60         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
61         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
62         PetscCall(KSPView(osm->ksp[i], sviewer));
63         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
64       }
65       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
66       PetscCall(PetscViewerASCIIPopTab(viewer));
67       PetscCall(PetscViewerFlush(viewer));
68       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
69     }
70   } else if (isstring) {
71     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
72     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
73     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
74     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
75   }
76   PetscFunctionReturn(0);
77 }

static PetscErrorCode PCASMPrintSubdomains(PC pc) {
  PC_ASM         *osm = (PC_ASM *)pc->data;
  const char     *prefix;
  char            fname[PETSC_MAX_PATH_LEN + 1];
  PetscViewer     viewer, sviewer;
  char           *s;
  PetscInt        i, j, nidx;
  const PetscInt *idx;
  PetscMPIInt     rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
  PetscCall(PCGetOptionsPrefix(pc, &prefix));
  PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
  if (fname[0] == 0) PetscCall(PetscStrcpy(fname, "stdout"));
  PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
  for (i = 0; i < osm->n_local; i++) {
    if (i < osm->n_local_true) {
      PetscCall(ISGetLocalSize(osm->is[i], &nidx));
      PetscCall(ISGetIndices(osm->is[i], &idx));
      /* Print to a string viewer; no more than 16 characters per index (15 digits plus a separator) plus 512 for the header. */
#define len 16 * (nidx + 1) + 512
      PetscCall(PetscMalloc1(len, &s));
      PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
#undef len
      PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
      for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
      PetscCall(ISRestoreIndices(osm->is[i], &idx));
      PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
      PetscCall(PetscViewerDestroy(&sviewer));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      PetscCall(PetscFree(s));
      if (osm->is_local) {
        /* Print to a string viewer; no more than 16 characters per index (15 digits plus a separator) plus 512 for the header. */
#define len 16 * (nidx + 1) + 512
        PetscCall(PetscMalloc1(len, &s));
        PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
#undef len
        PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
        PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
        PetscCall(ISGetIndices(osm->is_local[i], &idx));
        for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
        PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
        PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
        PetscCall(PetscViewerDestroy(&sviewer));
        PetscCall(PetscViewerASCIIPushSynchronized(viewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
        PetscCall(PetscViewerFlush(viewer));
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
        PetscCall(PetscFree(s));
      }
    } else {
      /* Participate in collective viewer calls. */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      /* Assume either all ranks have is_local or none do. */
      if (osm->is_local) {
        PetscCall(PetscViewerASCIIPushSynchronized(viewer));
        PetscCall(PetscViewerFlush(viewer));
        PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      }
    }
  }
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_ASM(PC pc) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscBool   flg;
  PetscInt    i, m, m_local;
  MatReuse    scall = MAT_REUSE_MATRIX;
  IS          isl;
  KSP         ksp;
  PC          subpc;
  const char *prefix, *pprefix;
  Vec         vec;
  DM         *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m;

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt num_domains, d;
        char   **domain_names;
        IS      *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        for (d = 0; d < num_domains; ++d) {
          if (domain_names) PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {
        PetscInt max, sum;
      } inwork, outwork;
      PetscMPIInt size;

      inwork.max = osm->n_local_true;
      inwork.sum = osm->n_local_true;
      PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes; setting this option may enable an optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
      for (i = 0; i < osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
        } else {
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
    }
    if (osm->sort_indices) {
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
      if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
        PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
        PetscCall(KSPSetType(ksp, KSPPREONLY));
        PetscCall(KSPGetPC(ksp, &subpc));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(KSPSetOptionsPrefix(ksp, prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
        if (domain_dm) {
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) PetscCall(PetscFree(domain_dm));
    }

    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
    for (i = 0; i < osm->n_local_true; i++) { PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix)); }
  }

  /* Convert the types of the submatrices (if need be) */
  if (osm->sub_mat_type) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));

    PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));

    PetscCall(ISGetLocalSize(osm->lis, &m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
    PetscCall(MatGetVecType(osm->pmat[0], &vtype));
    PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
    PetscCall(VecSetSizes(osm->lx, m, m));
    PetscCall(VecSetType(osm->lx, vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i = 0; i < osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt        *idx_is;
      PetscInt              *idx_lis, nout;

      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
      PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m, &idx_lis));
      PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
      PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
      PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll, isll_local;
        const PetscInt        *idx_local;
        PetscInt              *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
        PetscCall(PetscMalloc1(m_local, &idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));

        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
        PetscCall(PetscMalloc1(m_local, &idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
  for (i = 0; i < osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
    if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc) {
  PC_ASM            *osm = (PC_ASM *)pc->data;
  PetscInt           i;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i = 0; i < osm->n_local_true; i++) {
    PetscCall(KSPSetUp(osm->ksp[i]));
    PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
    if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(VecSet(y, 0.0));
    PetscCall(VecSet(osm->ly, 0.0));

    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      /* solve the overlapping i-block */
      PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
      PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
      PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
      PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping i-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      }

      if (i < n_local_true - 1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution */
          PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
          PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  Mat         Z, W;
  Vec         x;
  PetscInt    i, m, N;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(MatZeroEntries(Y));
    PetscCall(VecSet(osm->ly, 0.0));

    for (i = 0; i < N; ++i) {
      PetscCall(MatDenseGetColumnVecRead(X, i, &x));
      /* copy the global RHS to local RHS including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
      /* restrict local RHS to the overlapping 0-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
    }
    PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
    /* solve the overlapping 0-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(MatDestroy(&Z));

    for (i = 0; i < N; ++i) {
      PetscCall(VecSet(osm->ly, 0.0));
      PetscCall(MatDenseGetColumnVecRead(W, i, &x));
      if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping 0-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      }
      PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
      /* add the local solution to the global solution including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
    }
    PetscCall(MatDestroy(&W));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_ASM(PC pc) {
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
  }
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i = 0; i < osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));
  }
  PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_ASM(PC pc) {
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCall(PCReset_ASM(pc));
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
    PetscCall(PetscFree(osm->ksp));
  }
  PetscCall(PetscFree(pc->data));

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_ASM         *osm = (PC_ASM *)pc->data;
  PetscInt        blocks, ovl;
  PetscBool       flg;
  PCASMType       asmtype;
  PCCompositeType loctype;
  char            sub_mat_type[256];

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
  PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
  PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
  if (flg) {
    PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
    osm->dm_subdomains = PETSC_FALSE;
  }
  PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
  if (flg) {
    PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
    osm->dm_subdomains = PETSC_FALSE;
  }
  PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
  if (flg) {
    PetscCall(PCASMSetOverlap(pc, ovl));
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
  if (flg) PetscCall(PCASMSetType(pc, asmtype));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
  if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
  PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
  if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[]) {
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscMPIInt rank, size;
  PetscInt    n;

  PetscFunctionBegin;
  PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
  PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  n = N / size + ((N % size) > rank);
  PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
  PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
  PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  if (ksp) *ksp = osm->ksp;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(sub_mat_type, 2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscFree(osm->sub_mat_type));
  PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.

    Collective on pc

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Options Database Key:
    To set the number of local subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_local_blocks <blks> - Sets local blocks

    Notes:
    The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the `PCASM` preconditioner uses 1 block per processor.

    Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.

    If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method).

    If is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
    no code to handle that case.

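    Example Usage:
    A minimal sketch; the two index sets is0 and is1 are assumed to have been created
    by the caller (for instance with `ISCreateGeneral()`) in the global numbering:
.vb
    IS is[2] = {is0, is1};

    PCASMSetLocalSubdomains(pc, 2, is, NULL);
.ve
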
    Level: advanced

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner, `PCASM`.

    Collective on pc, all MPI ranks must pass in the same array of `IS`

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is or is_local.

    By default the `PCASM` preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the `PCASM` preconditioner is being used.

    Use `PCASMSetLocalSubdomains()` to set local subdomains.

    The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local

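    Example Usage:
    A minimal sketch letting PETSc construct the index sets; the 16 subdomains are
    split as evenly as possible among the processes:
.vb
    PCASMSetTotalSubdomains(pc, 16, NULL, NULL);
.ve
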
    Level: advanced

.seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCGASM`
@*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(0);
}

/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner, `PCASM`.

    Logically Collective on pc

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the `PCASM` preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
    `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a `PCASM`
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast, but not scalable;
    use the option -mat_increase_overlap_scalable when the problem and number of processes are large.

    Note that one can define initial index sets with any overlap via
    `PCASMSetLocalSubdomains()`; the routine
    `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
    if desired.

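    Example Usage:
    A minimal sketch; with the default one-subdomain-per-process decomposition, an
    overlap of 0 reduces `PCASM` to a block Jacobi preconditioner:
.vb
    PCASMSetOverlap(pc, 0);
.ve
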
    Level: intermediate

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
@*/
PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, ovl, 2);
  PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
  PetscFunctionReturn(0);
}

/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method, `PCASM`.

    Logically Collective on pc

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of `PCASM`, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`

    Note:
    If the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
    to limit the local processor interpolation

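    Example Usage:
    A minimal sketch selecting the basic (non-restricted) variant, equivalent to the
    option -pc_asm_type basic:
.vb
    PCASMSetType(pc, PC_ASM_BASIC);
.ve
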
    Level: intermediate

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetType(PC pc, PCASMType type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
  PetscFunctionReturn(0);
}

/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method, `PCASM`.

    Logically Collective on pc

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of `PCASM`, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type

    Level: intermediate

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
@*/
PetscErrorCode PCASMGetType(PC pc, PCASMType *type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
  PetscFunctionReturn(0);
}

/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective on pc

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

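  Example Usage:
  A minimal sketch selecting multiplicative (Gauss-Seidel-like) sweeps over the local
  blocks, equivalent to the option -pc_asm_local_type multiplicative:
.vb
  PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE);
.ve
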
  Level: intermediate

.seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}

/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

  Logically Collective on pc

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(0);
}

/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on pc

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, doSort, 2);
  PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
  PetscFunctionReturn(0);
}

1161 /*@C
1162    PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1163    this processor.
1164 
1165    Collective on pc iff first_local is requested
1166 
1167    Input Parameter:
1168 .  pc - the preconditioner context
1169 
1170    Output Parameters:
1171 +  n_local - the number of blocks on this processor or NULL
1172 .  first_local - the global number of the first block on this processor or NULL,
1173                  all processors must request or all must pass NULL
1174 -  ksp - the array of `KSP` contexts
1175 
1176    Notes:
1177    After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.
1178 
1179    You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1180 
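   Example Usage:
   A minimal sketch, assuming `ksp` is a `KSP` whose `PC` has type `PCASM` and `KSPSetUp()` has already been called:
.vb
  PetscInt nlocal, first, i;
  KSP     *subksp;
  PC       pc, subpc;

  KSPGetPC(ksp, &pc);
  PCASMGetSubKSP(pc, &nlocal, &first, &subksp);
  for (i = 0; i < nlocal; i++) {
    KSPGetPC(subksp[i], &subpc); // the PC of the i-th local block solver
    PCSetType(subpc, PCILU);     // e.g., use ILU on every block
  }
.ve
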
1181    Fortran Note:
1182    The output argument `ksp` must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1183 
1184    Level: advanced
1185 
1186 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1187           `PCASMCreateSubdomains2D()`
1188 @*/
1189 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[]) {
1190   PetscFunctionBegin;
1191   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1192   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1193   PetscFunctionReturn(0);
1194 }
1195 
1196 /*MC
1197    PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1198            its own `KSP` object.
1199 
1200    Options Database Keys:
1201 +  -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank.
1202 .  -pc_asm_overlap <ovl> - Sets overlap
1203 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1204 -  -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1205 
1206    Level: beginner
1207 
1208    Notes:
1209    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1210    will get a different convergence rate due to the default option of -pc_asm_type restrict.
1211    Use -pc_asm_type basic to get the same convergence behavior in both cases.
1212 
1213    Each processor can have one or more blocks, but a block cannot be shared by more
1214    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1215 
1216    To set options on the solvers for each block, append -sub_ to all the `KSP` and `PC`
1217    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1218 
1219    To set the options separately for each block, call `PCASMGetSubKSP()`
1220    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
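
   For example, the following command line (a sketch; the executable name is hypothetical) selects
   restricted additive Schwarz with overlap 2 and a direct LU solve on each block:
.vb
   mpiexec -n 4 ./my_app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type lu
.ve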
1221 
1222     References:
1223 +   * - M. Dryja and O. B. Widlund, An additive variant of the Schwarz alternating method for the case of many subregions,
1224      Courant Institute, New York University Technical report
1225 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1226      Cambridge University Press.
1227 
1228 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1229           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1230           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`
1231 M*/
1232 
1233 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc) {
1234   PC_ASM *osm;
1235 
1236   PetscFunctionBegin;
1237   PetscCall(PetscNew(&osm));
1238 
1239   osm->n             = PETSC_DECIDE;
1240   osm->n_local       = 0;
1241   osm->n_local_true  = PETSC_DECIDE;
1242   osm->overlap       = 1;
1243   osm->ksp           = NULL;
1244   osm->restriction   = NULL;
1245   osm->lprolongation = NULL;
1246   osm->lrestriction  = NULL;
1247   osm->x             = NULL;
1248   osm->y             = NULL;
1249   osm->is            = NULL;
1250   osm->is_local      = NULL;
1251   osm->mat           = NULL;
1252   osm->pmat          = NULL;
1253   osm->type          = PC_ASM_RESTRICT;
1254   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1255   osm->sort_indices  = PETSC_TRUE;
1256   osm->dm_subdomains = PETSC_FALSE;
1257   osm->sub_mat_type  = NULL;
1258 
1259   pc->data                 = (void *)osm;
1260   pc->ops->apply           = PCApply_ASM;
1261   pc->ops->matapply        = PCMatApply_ASM;
1262   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1263   pc->ops->setup           = PCSetUp_ASM;
1264   pc->ops->reset           = PCReset_ASM;
1265   pc->ops->destroy         = PCDestroy_ASM;
1266   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1267   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1268   pc->ops->view            = PCView_ASM;
1269   pc->ops->applyrichardson = NULL;
1270 
1271   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1272   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1273   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1274   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1275   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1276   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1277   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1278   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1279   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1280   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1281   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1282   PetscFunctionReturn(0);
1283 }
1284 
1285 /*@C
1286    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1287    preconditioner, `PCASM`,  for any problem on a general grid.
1288 
1289    Collective
1290 
1291    Input Parameters:
1292 +  A - The global matrix operator
1293 -  n - the number of local blocks
1294 
1295    Output Parameter:
1296 .  outis - the array of index sets defining the subdomains
1297 
1298    Level: advanced
1299 
1300    Note:
1301    This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
1302    from these if you use `PCASMSetLocalSubdomains()`
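
   Example Usage:
   A minimal sketch, assuming `A` is the system `Mat` and `pc` is a `PC` of type `PCASM`:
.vb
  IS      *is;
  PetscInt nsub = 4; // illustrative number of local blocks

  PCASMCreateSubdomains(A, nsub, &is);
  PCASMSetLocalSubdomains(pc, nsub, is, NULL);
  PCASMDestroySubdomains(nsub, is, NULL); // the PC keeps its own reference to the index sets
.ve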
1303 
1304    Fortran Note:
1305    You must provide `outis` as an already allocated array of length `n`.
1306 
1307 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1308 @*/
1309 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[]) {
1310   MatPartitioning mpart;
1311   const char     *prefix;
1312   PetscInt        i, j, rstart, rend, bs;
1313   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1314   Mat             Ad = NULL, adj;
1315   IS              ispart, isnumb, *is;
1316 
1317   PetscFunctionBegin;
1318   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1319   PetscValidPointer(outis, 3);
1320   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);
1321 
1322   /* Get prefix, row distribution, and block size */
1323   PetscCall(MatGetOptionsPrefix(A, &prefix));
1324   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1325   PetscCall(MatGetBlockSize(A, &bs));
1326   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1327 
1328   /* Get diagonal block from matrix if possible */
1329   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1330   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1331   if (Ad) {
1332     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1333     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1334   }
1335   if (Ad && n > 1) {
1336     PetscBool match, done;
1337     /* Try to setup a good matrix partitioning if available */
1338     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1339     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1340     PetscCall(MatPartitioningSetFromOptions(mpart));
1341     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1342     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1343     if (!match) { /* assume a "good" partitioner is available */
1344       PetscInt        na;
1345       const PetscInt *ia, *ja;
1346       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1347       if (done) {
1348         /* Build adjacency matrix by hand. Unfortunately a call to
1349            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1350            remove the block-aij structure and we cannot expect
1351            MatPartitioning to split vertices as we need */
1352         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1353         const PetscInt *row;
1354         nnz = 0;
1355         for (i = 0; i < na; i++) { /* count number of nonzeros */
1356           len = ia[i + 1] - ia[i];
1357           row = ja + ia[i];
1358           for (j = 0; j < len; j++) {
1359             if (row[j] == i) { /* don't count diagonal */
1360               len--;
1361               break;
1362             }
1363           }
1364           nnz += len;
1365         }
1366         PetscCall(PetscMalloc1(na + 1, &iia));
1367         PetscCall(PetscMalloc1(nnz, &jja));
1368         nnz    = 0;
1369         iia[0] = 0;
1370         for (i = 0; i < na; i++) { /* fill adjacency */
1371           cnt = 0;
1372           len = ia[i + 1] - ia[i];
1373           row = ja + ia[i];
1374           for (j = 0; j < len; j++) {
1375             if (row[j] != i) { /* if not diagonal */
1376               jja[nnz + cnt++] = row[j];
1377             }
1378           }
1379           nnz += cnt;
1380           iia[i + 1] = nnz;
1381         }
1382         /* Partitioning of the adjacency matrix */
1383         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1384         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1385         PetscCall(MatPartitioningSetNParts(mpart, n));
1386         PetscCall(MatPartitioningApply(mpart, &ispart));
1387         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1388         PetscCall(MatDestroy(&adj));
1389         foundpart = PETSC_TRUE;
1390       }
1391       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1392     }
1393     PetscCall(MatPartitioningDestroy(&mpart));
1394   }
1395 
1396   PetscCall(PetscMalloc1(n, &is));
1397   *outis = is;
1398 
1399   if (!foundpart) {
1400     /* Partitioning by contiguous chunks of rows */
1401 
1402     PetscInt mbs   = (rend - rstart) / bs;
1403     PetscInt start = rstart;
1404     for (i = 0; i < n; i++) {
1405       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1406       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1407       start += count;
1408     }
1409 
1410   } else {
1411     /* Partitioning by adjacency of diagonal block  */
1412 
1413     const PetscInt *numbering;
1414     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1415     /* Get node count in each partition */
1416     PetscCall(PetscMalloc1(n, &count));
1417     PetscCall(ISPartitioningCount(ispart, n, count));
1418     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1419       for (i = 0; i < n; i++) count[i] *= bs;
1420     }
1421     /* Build indices from node numbering */
1422     PetscCall(ISGetLocalSize(isnumb, &nidx));
1423     PetscCall(PetscMalloc1(nidx, &indices));
1424     for (i = 0; i < nidx; i++) indices[i] = i; /* identity permutation, required as input by PetscSortIntWithPermutation() */
1425     PetscCall(ISGetIndices(isnumb, &numbering));
1426     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1427     PetscCall(ISRestoreIndices(isnumb, &numbering));
1428     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1429       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1430       for (i = 0; i < nidx; i++) {
1431         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1432       }
1433       PetscCall(PetscFree(indices));
1434       nidx *= bs;
1435       indices = newidx;
1436     }
1437     /* Shift to get global indices */
1438     for (i = 0; i < nidx; i++) indices[i] += rstart;
1439 
1440     /* Build the index sets for each block */
1441     for (i = 0; i < n; i++) {
1442       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1443       PetscCall(ISSort(is[i]));
1444       start += count[i];
1445     }
1446 
1447     PetscCall(PetscFree(count));
1448     PetscCall(PetscFree(indices));
1449     PetscCall(ISDestroy(&isnumb));
1450     PetscCall(ISDestroy(&ispart));
1451   }
1452   PetscFunctionReturn(0);
1453 }
1454 
1455 /*@C
1456    PCASMDestroySubdomains - Destroys the index sets created with
1457    `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1458 
1459    Collective
1460 
1461    Input Parameters:
1462 +  n - the number of index sets
1463 .  is - the array of index sets
1464 -  is_local - the array of local index sets, can be NULL
1465 
1466    Level: advanced
1467 
1468 .seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1469 @*/
1470 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[]) {
1471   PetscInt i;
1472 
1473   PetscFunctionBegin;
1474   if (n <= 0) PetscFunctionReturn(0);
1475   if (is) {
1476     PetscValidPointer(is, 2);
1477     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1478     PetscCall(PetscFree(is));
1479   }
1480   if (is_local) {
1481     PetscValidPointer(is_local, 3);
1482     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1483     PetscCall(PetscFree(is_local));
1484   }
1485   PetscFunctionReturn(0);
1486 }
1487 
1488 /*@
1489    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1490    preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1491 
1492    Not Collective
1493 
1494    Input Parameters:
1495 +  m   - the number of mesh points in the x direction
1496 .  n   - the number of mesh points in the y direction
1497 .  M   - the number of subdomains in the x direction
1498 .  N   - the number of subdomains in the y direction
1499 .  dof - degrees of freedom per node
1500 -  overlap - overlap in mesh lines
1501 
1502    Output Parameters:
1503 +  Nsub - the number of subdomains created
1504 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1505 -  is_local - array of index sets defining non-overlapping subdomains
1506 
1507    Note:
1508    Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1509    preconditioners.  More general related routines are
1510    `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
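
   Example Usage:
   A sketch with illustrative sizes: a 64x64 grid split into 4x4 subdomains with one mesh line of overlap and one degree of freedom per node:
.vb
  PetscInt Nsub;
  IS      *is, *is_local;

  PCASMCreateSubdomains2D(64, 64, 4, 4, 1, 1, &Nsub, &is, &is_local);
  PCASMSetLocalSubdomains(pc, Nsub, is, is_local);
.ve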
1511 
1512    Level: advanced
1513 
1514 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1515           `PCASMSetOverlap()`
1516 @*/
1517 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local) {
1518   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1519   PetscInt nidx, *idx, loc, ii, jj, count;
1520 
1521   PetscFunctionBegin;
1522   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");
1523 
1524   *Nsub = N * M;
1525   PetscCall(PetscMalloc1(*Nsub, is));
1526   PetscCall(PetscMalloc1(*Nsub, is_local));
1527   ystart    = 0;
1528   loc_outer = 0;
1529   for (i = 0; i < N; i++) {
1530     height = n / N + ((n % N) > i); /* height of subdomain */
1531     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1532     yleft = ystart - overlap;
1533     if (yleft < 0) yleft = 0;
1534     yright = ystart + height + overlap;
1535     if (yright > n) yright = n;
1536     xstart = 0;
1537     for (j = 0; j < M; j++) {
1538       width = m / M + ((m % M) > j); /* width of subdomain */
1539       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1540       xleft = xstart - overlap;
1541       if (xleft < 0) xleft = 0;
1542       xright = xstart + width + overlap;
1543       if (xright > m) xright = m;
1544       nidx = (xright - xleft) * (yright - yleft);
1545       PetscCall(PetscMalloc1(nidx, &idx));
1546       loc = 0;
1547       for (ii = yleft; ii < yright; ii++) {
1548         count = m * ii + xleft;
1549         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1550       }
1551       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1552       if (overlap == 0) {
1553         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));
1554         (*is_local)[loc_outer] = (*is)[loc_outer]; /* with no overlap the local and overlapping subdomains coincide */
1556       } else {
1557         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1558           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1559         }
1560         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1561       }
1562       PetscCall(PetscFree(idx));
1563       xstart += width;
1564       loc_outer++;
1565     }
1566     ystart += height;
1567   }
1568   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1569   PetscFunctionReturn(0);
1570 }
1571 
1572 /*@C
1573     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1574     only) for the additive Schwarz preconditioner, `PCASM`.
1575 
1576     Not Collective
1577 
1578     Input Parameter:
1579 .   pc - the preconditioner context
1580 
1581     Output Parameters:
1582 +   n - if requested, the number of subdomains for this processor (default value = 1)
1583 .   is - if requested, the index sets that define the subdomains for this processor
1584 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1585 
1586     Note:
1587     The `IS` numbering is in the parallel, global numbering of the vector.
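
    Example Usage:
    A minimal sketch, assuming `pc` is a `PC` of type `PCASM`:
.vb
  PetscInt n;
  IS      *is, *is_local;

  PCASMGetLocalSubdomains(pc, &n, &is, &is_local);
.ve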
1588 
1589     Level: advanced
1590 
1591 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1592           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1593 @*/
1594 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[]) {
1595   PC_ASM   *osm = (PC_ASM *)pc->data;
1596   PetscBool match;
1597 
1598   PetscFunctionBegin;
1599   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1600   if (n) PetscValidIntPointer(n, 2);
1601   if (is) PetscValidPointer(is, 3);
1602   if (is_local) PetscValidPointer(is_local, 4);
1603   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1604   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1605   if (n) *n = osm->n_local_true;
1606   if (is) *is = osm->is;
1607   if (is_local) *is_local = osm->is_local;
1608   PetscFunctionReturn(0);
1609 }
1610 
1611 /*@C
1612     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1613     only) for the additive Schwarz preconditioner, `PCASM`.
1614 
1615     Not Collective
1616 
1617     Input Parameter:
1618 .   pc - the preconditioner context
1619 
1620     Output Parameters:
1621 +   n - if requested, the number of matrices for this processor (default value = 1)
1622 -   mat - if requested, the matrices
1623 
1624     Level: advanced
1625 
1626     Notes:
1627     Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`.
1628 
1629     Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
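
    Example Usage:
    A minimal sketch, assuming `ksp` uses a `PCASM` preconditioner; the viewer option name is hypothetical:
.vb
  PetscInt nmat, i;
  Mat     *submat;
  PC       pc;

  KSPSetUp(ksp);
  KSPGetPC(ksp, &pc);
  PCASMGetLocalSubmatrices(pc, &nmat, &submat);
  for (i = 0; i < nmat; i++) MatViewFromOptions(submat[i], NULL, "-my_submat_view");
.ve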
1630 
1631 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1632           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1633 @*/
1634 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[]) {
1635   PC_ASM   *osm;
1636   PetscBool match;
1637 
1638   PetscFunctionBegin;
1639   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1640   if (n) PetscValidIntPointer(n, 2);
1641   if (mat) PetscValidPointer(mat, 3);
1642   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1643   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1644   if (!match) {
1645     if (n) *n = 0;
1646     if (mat) *mat = NULL;
1647   } else {
1648     osm = (PC_ASM *)pc->data;
1649     if (n) *n = osm->n_local_true;
1650     if (mat) *mat = osm->pmat;
1651   }
1652   PetscFunctionReturn(0);
1653 }
1654 
1655 /*@
1656     PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1657 
1658     Logically Collective
1659 
1660     Input Parameters:
1661 +   pc  - the preconditioner
1662 -   flg - boolean indicating whether to use subdomains defined by the `DM`
1663 
1664     Options Database Key:
1665 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1666 
1667     Level: intermediate
1668 
1669     Note:
1670     `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1671     so setting either of the first two effectively turns the latter off.
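
    Example Usage:
    A minimal sketch, assuming `dm` supports `DMCreateDomainDecomposition()`:
.vb
  PC pc;

  KSPSetDM(ksp, dm);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCASM);
  PCASMSetDMSubdomains(pc, PETSC_TRUE); // let the DM define the subdomains
.ve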
1672 
1673 .seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1674           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1675 @*/
1676 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg) {
1677   PC_ASM   *osm = (PC_ASM *)pc->data;
1678   PetscBool match;
1679 
1680   PetscFunctionBegin;
1681   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1682   PetscValidLogicalCollectiveBool(pc, flg, 2);
1683   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1684   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1685   if (match) osm->dm_subdomains = flg;
1686   PetscFunctionReturn(0);
1687 }
1688 
1689 /*@
1690     PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1691 
1692     Not Collective
1693 
1694     Input Parameter:
1695 .   pc  - the preconditioner
1696 
1697     Output Parameter:
1698 .   flg - boolean indicating whether to use subdomains defined by the `DM`
1699 
1700     Level: intermediate
1701 
1702 .seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1703           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1704 @*/
1705 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg) {
1706   PC_ASM   *osm = (PC_ASM *)pc->data;
1707   PetscBool match;
1708 
1709   PetscFunctionBegin;
1710   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1711   PetscValidBoolPointer(flg, 2);
1712   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1713   if (match) *flg = osm->dm_subdomains;
1714   else *flg = PETSC_FALSE;
1715   PetscFunctionReturn(0);
1716 }
1717 
1718 /*@
1719      PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1720 
1721    Not Collective
1722 
1723    Input Parameter:
1724 .  pc - the `PC`
1725 
1726    Output Parameter:
1727 .  sub_mat_type - name of the matrix type used for the subsolves
1728 
1729    Level: advanced
1730 
1731 .seealso: `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1732 @*/
1733 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type) {
1734   PetscFunctionBegin;
1735   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1736   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1737   PetscFunctionReturn(0);
1738 }
1739 
1740 /*@
1741      PCASMSetSubMatType - Sets the type of matrix used for `PCASM` subsolves
1742 
1743    Collective on pc
1744 
1745    Input Parameters:
1746 +  pc             - the `PC` object
1747 -  sub_mat_type   - the `MatType`
1748 
1749    Options Database Key:
1750 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1751    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1752 
1753    Note:
1754    See "${PETSC_DIR}/include/petscmat.h" for available types
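
   Example Usage:
   A minimal sketch, assuming a PETSc build with ViennaCL support:
.vb
  PCASMSetSubMatType(pc, MATSEQAIJVIENNACL); // perform the block subsolves with ViennaCL matrices
.ve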
1755 
1756   Level: advanced
1757 
1758 .seealso: `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1759 @*/
1760 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type) {
1761   PetscFunctionBegin;
1762   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1763   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1764   PetscFunctionReturn(0);
1765 }
1766