xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision d5b43468fb8780a8feea140ccd6fa3e6a50411cc)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
/*
  PCView_ASM - Prints the state of the additive Schwarz preconditioner.

  Supports two viewer types (checked below): ASCII and string. For ASCII,
  the amount of detail depends on the viewer format: the default prints only
  rank 0's first block solver; PETSC_VIEWER_ASCII_INFO_DETAIL prints every
  block's KSP on every rank via synchronized printing.

  Collective on pc; all ranks must execute the Get/RestoreSubViewer and
  Push/PopSynchronized calls so this must not be short-circuited per rank.
*/
static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
{
  PC_ASM           *osm = (PC_ASM *)pc->data;
  PetscMPIInt       rank;
  PetscInt          i, bsz;
  PetscBool         iascii, isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char       *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    /* Default texts are used when overlap/blocks have not been determined yet */
    char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        /* Summary view: only rank 0 shows its first block's KSP; all ranks
           still participate in the collective sub-viewer calls */
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(viewer));
          PetscCall(KSPView(osm->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(viewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      }
    } else {
      /* Detailed view: every rank prints its block count, then every block's
         KSP through its own sequential sub-viewer */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i], &bsz));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
        PetscCall(KSPView(osm->ksp[i], sviewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    /* One-line summary plus the first block's KSP, for string viewers */
    PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  }
  PetscFunctionReturn(0);
}
79 
80 static PetscErrorCode PCASMPrintSubdomains(PC pc)
81 {
82   PC_ASM         *osm = (PC_ASM *)pc->data;
83   const char     *prefix;
84   char            fname[PETSC_MAX_PATH_LEN + 1];
85   PetscViewer     viewer, sviewer;
86   char           *s;
87   PetscInt        i, j, nidx;
88   const PetscInt *idx;
89   PetscMPIInt     rank, size;
90 
91   PetscFunctionBegin;
92   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
93   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
94   PetscCall(PCGetOptionsPrefix(pc, &prefix));
95   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
96   if (fname[0] == 0) PetscCall(PetscStrcpy(fname, "stdout"));
97   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
98   for (i = 0; i < osm->n_local; i++) {
99     if (i < osm->n_local_true) {
100       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
101       PetscCall(ISGetIndices(osm->is[i], &idx));
102       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
103 #define len 16 * (nidx + 1) + 512
104       PetscCall(PetscMalloc1(len, &s));
105       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
106 #undef len
107       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
108       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
109       PetscCall(ISRestoreIndices(osm->is[i], &idx));
110       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
111       PetscCall(PetscViewerDestroy(&sviewer));
112       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
113       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
114       PetscCall(PetscViewerFlush(viewer));
115       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
116       PetscCall(PetscFree(s));
117       if (osm->is_local) {
118         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
119 #define len 16 * (nidx + 1) + 512
120         PetscCall(PetscMalloc1(len, &s));
121         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
122 #undef len
123         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
124         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
125         PetscCall(ISGetIndices(osm->is_local[i], &idx));
126         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
127         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
128         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
129         PetscCall(PetscViewerDestroy(&sviewer));
130         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
131         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
132         PetscCall(PetscViewerFlush(viewer));
133         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
134         PetscCall(PetscFree(s));
135       }
136     } else {
137       /* Participate in collective viewer calls. */
138       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
139       PetscCall(PetscViewerFlush(viewer));
140       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
141       /* Assume either all ranks have is_local or none do. */
142       if (osm->is_local) {
143         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
144         PetscCall(PetscViewerFlush(viewer));
145         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
146       }
147     }
148   }
149   PetscCall(PetscViewerFlush(viewer));
150   PetscCall(PetscViewerDestroy(&viewer));
151   PetscFunctionReturn(0);
152 }
153 
154 static PetscErrorCode PCSetUp_ASM(PC pc)
155 {
156   PC_ASM     *osm = (PC_ASM *)pc->data;
157   PetscBool   flg;
158   PetscInt    i, m, m_local;
159   MatReuse    scall = MAT_REUSE_MATRIX;
160   IS          isl;
161   KSP         ksp;
162   PC          subpc;
163   const char *prefix, *pprefix;
164   Vec         vec;
165   DM         *domain_dm = NULL;
166 
167   PetscFunctionBegin;
168   if (!pc->setupcalled) {
169     PetscInt m;
170 
171     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172     if (osm->n_local_true == PETSC_DECIDE) {
173       /* no subdomains given */
174       /* try pc->dm first, if allowed */
175       if (osm->dm_subdomains && pc->dm) {
176         PetscInt num_domains, d;
177         char   **domain_names;
178         IS      *inner_domain_is, *outer_domain_is;
179         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181                               A future improvement of this code might allow one to use
182                               DM-defined subdomains and also increase the overlap,
183                               but that is not currently supported */
184         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185         for (d = 0; d < num_domains; ++d) {
186           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189         }
190         PetscCall(PetscFree(domain_names));
191         PetscCall(PetscFree(inner_domain_is));
192         PetscCall(PetscFree(outer_domain_is));
193       }
194       if (osm->n_local_true == PETSC_DECIDE) {
195         /* still no subdomains; use one subdomain per processor */
196         osm->n_local_true = 1;
197       }
198     }
199     { /* determine the global and max number of subdomains */
200       struct {
201         PetscInt max, sum;
202       } inwork, outwork;
203       PetscMPIInt size;
204 
205       inwork.max = osm->n_local_true;
206       inwork.sum = osm->n_local_true;
207       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208       osm->n_local = outwork.max;
209       osm->n       = outwork.sum;
210 
211       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212       if (outwork.max == 1 && outwork.sum == size) {
213         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
214         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215       }
216     }
217     if (!osm->is) { /* create the index sets */
218       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219     }
220     if (osm->n_local_true > 1 && !osm->is_local) {
221       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222       for (i = 0; i < osm->n_local_true; i++) {
223         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226         } else {
227           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228           osm->is_local[i] = osm->is[i];
229         }
230       }
231     }
232     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233     if (osm->overlap > 0) {
234       /* Extend the "overlapping" regions by a number of steps */
235       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236     }
237     if (osm->sort_indices) {
238       for (i = 0; i < osm->n_local_true; i++) {
239         PetscCall(ISSort(osm->is[i]));
240         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241       }
242     }
243     flg = PETSC_FALSE;
244     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246     if (!osm->ksp) {
247       /* Create the local solvers */
248       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250       for (i = 0; i < osm->n_local_true; i++) {
251         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
253         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
254         PetscCall(KSPSetType(ksp, KSPPREONLY));
255         PetscCall(KSPGetPC(ksp, &subpc));
256         PetscCall(PCGetOptionsPrefix(pc, &prefix));
257         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
258         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
259         if (domain_dm) {
260           PetscCall(KSPSetDM(ksp, domain_dm[i]));
261           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
262           PetscCall(DMDestroy(&domain_dm[i]));
263         }
264         osm->ksp[i] = ksp;
265       }
266       if (domain_dm) PetscCall(PetscFree(domain_dm));
267     }
268 
269     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
270     PetscCall(ISSortRemoveDups(osm->lis));
271     PetscCall(ISGetLocalSize(osm->lis, &m));
272 
273     scall = MAT_INITIAL_MATRIX;
274   } else {
275     /*
276        Destroy the blocks from the previous iteration
277     */
278     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
279       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
280       scall = MAT_INITIAL_MATRIX;
281     }
282   }
283 
284   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
285   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
286     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
287     scall = MAT_INITIAL_MATRIX;
288   }
289 
290   /*
291      Extract out the submatrices
292   */
293   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
294   if (scall == MAT_INITIAL_MATRIX) {
295     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
296     for (i = 0; i < osm->n_local_true; i++) { PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix)); }
297   }
298 
299   /* Convert the types of the submatrices (if needbe) */
300   if (osm->sub_mat_type) {
301     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
302   }
303 
304   if (!pc->setupcalled) {
305     VecType vtype;
306 
307     /* Create the local work vectors (from the local matrices) and scatter contexts */
308     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
309 
310     PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
311     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
312     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
313     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
314     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
315 
316     PetscCall(ISGetLocalSize(osm->lis, &m));
317     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
318     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
319     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
320     PetscCall(VecSetSizes(osm->lx, m, m));
321     PetscCall(VecSetType(osm->lx, vtype));
322     PetscCall(VecDuplicate(osm->lx, &osm->ly));
323     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
324     PetscCall(ISDestroy(&isl));
325 
326     for (i = 0; i < osm->n_local_true; ++i) {
327       ISLocalToGlobalMapping ltog;
328       IS                     isll;
329       const PetscInt        *idx_is;
330       PetscInt              *idx_lis, nout;
331 
332       PetscCall(ISGetLocalSize(osm->is[i], &m));
333       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
334       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
335 
336       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
337       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
338       PetscCall(ISGetLocalSize(osm->is[i], &m));
339       PetscCall(ISGetIndices(osm->is[i], &idx_is));
340       PetscCall(PetscMalloc1(m, &idx_lis));
341       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
342       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
343       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
344       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
345       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
346       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
347       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
348       PetscCall(ISDestroy(&isll));
349       PetscCall(ISDestroy(&isl));
350       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
351         ISLocalToGlobalMapping ltog;
352         IS                     isll, isll_local;
353         const PetscInt        *idx_local;
354         PetscInt              *idx1, *idx2, nout;
355 
356         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
357         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
358 
359         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
360         PetscCall(PetscMalloc1(m_local, &idx1));
361         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
362         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
363         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
364         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
365 
366         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
367         PetscCall(PetscMalloc1(m_local, &idx2));
368         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
369         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
370         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
371         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
372 
373         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
374         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
375 
376         PetscCall(ISDestroy(&isll));
377         PetscCall(ISDestroy(&isll_local));
378       }
379     }
380     PetscCall(VecDestroy(&vec));
381   }
382 
383   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
384     IS      *cis;
385     PetscInt c;
386 
387     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
388     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
389     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
390     PetscCall(PetscFree(cis));
391   }
392 
393   /* Return control to the user so that the submatrices can be modified (e.g., to apply
394      different boundary conditions for the submatrices than for the global problem) */
395   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
396 
397   /*
398      Loop over subdomains putting them into local ksp
399   */
400   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
401   for (i = 0; i < osm->n_local_true; i++) {
402     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
403     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
404     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
405   }
406   PetscFunctionReturn(0);
407 }
408 
409 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
410 {
411   PC_ASM            *osm = (PC_ASM *)pc->data;
412   PetscInt           i;
413   KSPConvergedReason reason;
414 
415   PetscFunctionBegin;
416   for (i = 0; i < osm->n_local_true; i++) {
417     PetscCall(KSPSetUp(osm->ksp[i]));
418     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
419     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
420   }
421   PetscFunctionReturn(0);
422 }
423 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

  Flow: scatter the global RHS x into the local overlapping vector lx,
  restrict lx into each block RHS x[i], solve each block with its KSP into
  y[i], accumulate the block solutions into the local vector ly (via
  lprolongation when restricting to the non-overlapping part, otherwise via
  the reverse of lrestriction), and finally scatter-add ly back into the
  global y. For PC_COMPOSITE_MULTIPLICATIVE, each subsequent block's RHS is
  corrected with the solution accumulated so far.
*/
static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(VecSet(y, 0.0));
    PetscCall(VecSet(osm->ly, 0.0));

    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      /* solve the overlapping i-block */
      PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
      PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
      PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
      PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping i-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      }

      if (i < n_local_true - 1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1] * ly (Gauss-Seidel-like sweep over blocks) */
          PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
          PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
489 
/*
  PCMatApply_ASM - Applies the preconditioner to N right-hand sides at once:
  Y = M^{-1} X, where X and Y are dense matrices whose columns are the RHS
  and solution vectors. Only the single-local-block case is implemented
  (checked below); the block solve uses KSPMatSolve() on all columns at once,
  with the restriction/prolongation scatters applied column by column.
*/
static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  Mat         Z, W;
  Vec         x;
  PetscInt    i, m, N;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
  /* Z holds the restricted block RHS for all N columns */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(MatZeroEntries(Y));
    PetscCall(VecSet(osm->ly, 0.0));

    for (i = 0; i < N; ++i) {
      PetscCall(MatDenseGetColumnVecRead(X, i, &x));
      /* copy the global RHS to local RHS including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
      /* restrict local RHS to the overlapping 0-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
    }
    PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
    /* solve the overlapping 0-block for all columns simultaneously */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(MatDestroy(&Z));

    for (i = 0; i < N; ++i) {
      /* ly is reused per column, so it must be re-zeroed each time */
      PetscCall(VecSet(osm->ly, 0.0));
      PetscCall(MatDenseGetColumnVecRead(W, i, &x));
      if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping 0-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      }
      PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
      /* add the local solution to the global solution including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
    }
    PetscCall(MatDestroy(&W));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
561 
/*
  PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
  preconditioner: y = M^{-T} x. Same restrict/solve/interpolate structure as
  PCApply_ASM() but with KSPSolveTranspose() for the block solves and with
  the RESTRICT/INTERPOLATE scatter-mode roles swapped (the transpose of the
  three composed operators reverses which side is localized). Only the
  additive composition is handled here (no multiplicative RHS update).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block (transposed system) */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(0);
}
623 
/* PCReset_ASM - Releases the data built during PCSetUp_ASM() so the PC can be
   set up again (e.g. for a new operator). The subdomain KSP contexts are
   reset but kept; user-settable options such as overlap and type are retained. */
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* Reset (do not destroy) the per-block solvers so they can be reused */
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
  }
  /* osm->lrestriction being non-NULL indicates the scatters/work vectors were created */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i = 0; i < osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));
  }
  PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* osm->lmats are only created for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}
661 
662 static PetscErrorCode PCDestroy_ASM(PC pc)
663 {
664   PC_ASM  *osm = (PC_ASM *)pc->data;
665   PetscInt i;
666 
667   PetscFunctionBegin;
668   PetscCall(PCReset_ASM(pc));
669   if (osm->ksp) {
670     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
671     PetscCall(PetscFree(osm->ksp));
672   }
673   PetscCall(PetscFree(pc->data));
674 
675   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
676   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
677   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
686   PetscFunctionReturn(0);
687 }
688 
689 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
690 {
691   PC_ASM         *osm = (PC_ASM *)pc->data;
692   PetscInt        blocks, ovl;
693   PetscBool       flg;
694   PCASMType       asmtype;
695   PCCompositeType loctype;
696   char            sub_mat_type[256];
697 
698   PetscFunctionBegin;
699   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
700   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
701   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
702   if (flg) {
703     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
704     osm->dm_subdomains = PETSC_FALSE;
705   }
706   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
707   if (flg) {
708     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
709     osm->dm_subdomains = PETSC_FALSE;
710   }
711   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
712   if (flg) {
713     PetscCall(PCASMSetOverlap(pc, ovl));
714     osm->dm_subdomains = PETSC_FALSE;
715   }
716   flg = PETSC_FALSE;
717   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
718   if (flg) PetscCall(PCASMSetType(pc, asmtype));
719   flg = PETSC_FALSE;
720   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
721   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
722   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
723   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
724   PetscOptionsHeadEnd();
725   PetscFunctionReturn(0);
726 }
727 
/* PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains()
   for the PCASM type. Takes a reference on every provided IS and releases any
   previously stored subdomains. Only legal before PCSetUp() has run. */
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  /* After setup only a call that changes nothing (same n, no index sets) is tolerated */
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets before releasing the stored ones, so a
       caller may safely pass the sets that are currently stored */
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* No overlapping sets supplied: derive osm->is from the local sets */
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            /* zero overlap: share the local IS rather than copying it */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
774 
775 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
776 {
777   PC_ASM     *osm = (PC_ASM *)pc->data;
778   PetscMPIInt rank, size;
779   PetscInt    n;
780 
781   PetscFunctionBegin;
782   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
783   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
784 
785   /*
786      Split the subdomains equally among all processors
787   */
788   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
789   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
790   n = N / size + ((N % size) > rank);
791   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
792   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
793   if (!pc->setupcalled) {
794     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
795 
796     osm->n_local_true = n;
797     osm->is           = NULL;
798     osm->is_local     = NULL;
799   }
800   PetscFunctionReturn(0);
801 }
802 
803 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
804 {
805   PC_ASM *osm = (PC_ASM *)pc->data;
806 
807   PetscFunctionBegin;
808   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
809   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
810   if (!pc->setupcalled) osm->overlap = ovl;
811   PetscFunctionReturn(0);
812 }
813 
/* PCASMSetType_ASM - Implementation of PCASMSetType() for the PCASM type;
   records the restriction/interpolation variant and that the user chose it. */
static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE; /* remember the type was explicitly selected */
  PetscFunctionReturn(0);
}
823 
/* PCASMGetType_ASM - Implementation of PCASMGetType() for the PCASM type;
   returns the current restriction/interpolation variant. */
static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}
832 
/* PCASMSetLocalType_ASM - Implementation of PCASMSetLocalType() for the PCASM
   type; only additive and multiplicative local compositions are supported. */
static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}
842 
/* PCASMGetLocalType_ASM - Implementation of PCASMGetLocalType() for the PCASM
   type; returns the current local composition type. */
static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
851 
/* PCASMSetSortIndices_ASM - Implementation of PCASMSetSortIndices() for the
   PCASM type; records whether subdomain indices should be sorted. */
static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
860 
/* PCASMGetSubKSP_ASM - Implementation of PCASMGetSubKSP() for the PCASM type.
   All output arguments are optional (may be NULL). Requesting first_local is
   collective since it performs an MPI_Scan over the PC's communicator. */
static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* Inclusive prefix sum of block counts; subtracting our own count yields
       the global index of this rank's first block */
    PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  if (ksp) *ksp = osm->ksp; /* borrowed reference; caller must not free */
  PetscFunctionReturn(0);
}
876 
/* PCASMGetSubMatType_ASM - Implementation of PCASMGetSubMatType() for the
   PCASM type; returns the stored subdomain matrix type (may be NULL if unset). */
static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(sub_mat_type, 2);
  *sub_mat_type = osm->sub_mat_type; /* borrowed string; owned by the PC */
  PetscFunctionReturn(0);
}
887 
/* PCASMSetSubMatType_ASM - Implementation of PCASMSetSubMatType() for the
   PCASM type; stores a private copy of the subdomain matrix type name. */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* free any previously stored name before duplicating the new one */
  PetscCall(PetscFree(osm->sub_mat_type));
  PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
  PetscFunctionReturn(0);
}
898 
899 /*@C
900     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
901 
902     Collective on pc
903 
904     Input Parameters:
905 +   pc - the preconditioner context
906 .   n - the number of subdomains for this processor (default value = 1)
907 .   is - the index set that defines the subdomains for this processor
908          (or NULL for PETSc to determine subdomains)
909 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
910          (or NULL to not provide these)
911 
912     Options Database Key:
913     To set the total number of subdomain blocks rather than specify the
914     index sets, use the option
915 .    -pc_asm_local_blocks <blks> - Sets local blocks
916 
917     Notes:
918     The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
919 
920     By default the `PCASM` preconditioner uses 1 block per processor.
921 
922     Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
923 
924     If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
925     back to form the global solution (this is the standard restricted additive Schwarz method)
926 
    If is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
928     no code to handle that case.
929 
930     Level: advanced
931 
932 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
933           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
934 @*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the PCASM implementation if composed; no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(0);
}
942 
943 /*@C
944     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
945     additive Schwarz preconditioner, `PCASM`.
946 
947     Collective on pc, all MPI ranks must pass in the same array of `IS`
948 
949     Input Parameters:
950 +   pc - the preconditioner context
951 .   N  - the number of subdomains for all processors
952 .   is - the index sets that define the subdomains for all processors
953          (or NULL to ask PETSc to determine the subdomains)
954 -   is_local - the index sets that define the local part of the subdomains for this processor
955          (or NULL to not provide this information)
956 
957     Options Database Key:
958     To set the total number of subdomain blocks rather than specify the
959     index sets, use the option
960 .    -pc_asm_blocks <blks> - Sets total blocks
961 
962     Notes:
963     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
964 
965     By default the `PCASM` preconditioner uses 1 block per processor.
966 
967     These index sets cannot be destroyed until after completion of the
968     linear solves for which the `PCASM` preconditioner is being used.
969 
970     Use `PCASMSetLocalSubdomains()` to set local subdomains.
971 
972     The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
973 
974     Level: advanced
975 
976 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
977           `PCASMCreateSubdomains2D()`, `PCGASM`
978 @*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the PCASM implementation if composed; no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(0);
}
986 
987 /*@
988     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
989     additive Schwarz preconditioner, `PCASM`.
990 
991     Logically Collective on pc
992 
993     Input Parameters:
994 +   pc  - the preconditioner context
995 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
996 
997     Options Database Key:
998 .   -pc_asm_overlap <ovl> - Sets overlap
999 
1000     Notes:
1001     By default the `PCASM` preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1003     `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1004 
1005     The overlap defaults to 1, so if one desires that no additional
1006     overlap be computed beyond what may have been set with a call to
1007     `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1008     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1011     variant that is equivalent to the block Jacobi preconditioner.
1012 
1013     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1014     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1015 
1016     Note that one can define initial index sets with any overlap via
1017     `PCASMSetLocalSubdomains()`; the routine
1018     `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1019     if desired.
1020 
1021     Level: intermediate
1022 
1023 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1024           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1025 @*/
PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, ovl, 2);
  /* dispatch to the PCASM implementation if composed; no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
  PetscFunctionReturn(0);
}
1034 
1035 /*@
1036     PCASMSetType - Sets the type of restriction and interpolation used
1037     for local problems in the additive Schwarz method, `PCASM`.
1038 
1039     Logically Collective on pc
1040 
1041     Input Parameters:
1042 +   pc  - the preconditioner context
1043 -   type - variant of `PCASM`, one of
1044 .vb
1045       PC_ASM_BASIC       - full interpolation and restriction
1046       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1047       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1048       PC_ASM_NONE        - local processor restriction and interpolation
1049 .ve
1050 
1051     Options Database Key:
1052 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1053 
1054     Note:
1055     if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1056     to limit the local processor interpolation
1057 
1058     Level: intermediate
1059 
1060 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1061           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1062 @*/
PetscErrorCode PCASMSetType(PC pc, PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the PCASM implementation if composed; no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
  PetscFunctionReturn(0);
}
1071 
1072 /*@
1073     PCASMGetType - Gets the type of restriction and interpolation used
1074     for local problems in the additive Schwarz method, `PCASM`.
1075 
1076     Logically Collective on pc
1077 
1078     Input Parameter:
1079 .   pc  - the preconditioner context
1080 
1081     Output Parameter:
1082 .   type - variant of `PCASM`, one of
1083 
1084 .vb
1085       PC_ASM_BASIC       - full interpolation and restriction
1086       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1087       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1088       PC_ASM_NONE        - local processor restriction and interpolation
1089 .ve
1090 
1091     Options Database Key:
1092 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1093 
1094     Level: intermediate
1095 
1096 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1097           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1098 @*/
PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod (not Try): errors if pc does not provide the method */
  PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
  PetscFunctionReturn(0);
}
1106 
1107 /*@
1108   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1109 
1110   Logically Collective on pc
1111 
1112   Input Parameters:
1113 + pc  - the preconditioner context
1114 - type - type of composition, one of
1115 .vb
1116   PC_COMPOSITE_ADDITIVE       - local additive combination
1117   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1118 .ve
1119 
1120   Options Database Key:
1121 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1122 
1123   Level: intermediate
1124 
1125 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASM`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType`
1126 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the PCASM implementation if composed; no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}
1135 
1136 /*@
1137   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1138 
1139   Logically Collective on pc
1140 
1141   Input Parameter:
1142 . pc  - the preconditioner context
1143 
1144   Output Parameter:
1145 . type - type of composition, one of
1146 .vb
1147   PC_COMPOSITE_ADDITIVE       - local additive combination
1148   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1149 .ve
1150 
1151   Options Database Key:
1152 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1153 
1154   Level: intermediate
1155 
1156 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType`
1157 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod (not Try): errors if pc does not provide the method */
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(0);
}
1166 
1167 /*@
1168     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1169 
1170     Logically Collective on pc
1171 
1172     Input Parameters:
1173 +   pc  - the preconditioner context
1174 -   doSort - sort the subdomain indices
1175 
1176     Level: intermediate
1177 
1178 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1179           `PCASMCreateSubdomains2D()`
1180 @*/
PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, doSort, 2);
  /* dispatch to the PCASM implementation if composed; no-op for other PC types */
  PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
  PetscFunctionReturn(0);
}
1189 
1190 /*@C
1191    PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1192    this processor.
1193 
1194    Collective on pc iff first_local is requested
1195 
1196    Input Parameter:
1197 .  pc - the preconditioner context
1198 
1199    Output Parameters:
1200 +  n_local - the number of blocks on this processor or NULL
1201 .  first_local - the global number of the first block on this processor or NULL,
1202                  all processors must request or all must pass NULL
1203 -  ksp - the array of `KSP` contexts
1204 
1205    Notes:
1206    After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.
1207 
1208    You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1209 
1210    Fortran Note:
1211    The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1212 
1213    Level: advanced
1214 
1215 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1216           `PCASMCreateSubdomains2D()`,
1217 @*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod (not Try): errors if pc does not provide the method */
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(0);
}
1225 
1226 /*MC
1227    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1228            its own `KSP` object.
1229 
1230    Options Database Keys:
1231 +  -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank.
1232 .  -pc_asm_overlap <ovl> - Sets overlap
1233 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1234 -  -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1235 
1236    Level: beginner
1237 
1238    Notes:
1239    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1240    will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1241     -pc_asm_type basic to get the same convergence behavior
1242 
1243    Each processor can have one or more blocks, but a block cannot be shared by more
1244    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1245 
1246    To set options on the solvers for each block append -sub_ to all the `KSP`, and `PC`
1247    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1248 
1249    To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
1250    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1251 
1252     References:
1253 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1254      Courant Institute, New York University Technical report
1255 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1256     Cambridge University Press.
1257 
1258 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1259           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1260           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1261 M*/
1262 
/* PCCreate_ASM - Creates the PCASM context, installs its function table, and
   composes the type-specific setter/getter methods onto the PC object.

   Defaults: one block per process decided later (n/n_local_true = PETSC_DECIDE),
   overlap of 1, restricted additive Schwarz (PC_ASM_RESTRICT) with additive
   local composition, and sorted subdomain indices. */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscCall(PetscNew(&osm));

  /* make every default explicit, including fields PetscNew() already zeroed */
  osm->n             = PETSC_DECIDE;
  osm->n_local       = 0;
  osm->n_local_true  = PETSC_DECIDE;
  osm->overlap       = 1;
  osm->ksp           = NULL;
  osm->restriction   = NULL;
  osm->lprolongation = NULL;
  osm->lrestriction  = NULL;
  osm->x             = NULL;
  osm->y             = NULL;
  osm->is            = NULL;
  osm->is_local      = NULL;
  osm->mat           = NULL;
  osm->pmat          = NULL;
  osm->type          = PC_ASM_RESTRICT;
  osm->loctype       = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices  = PETSC_TRUE;
  osm->dm_subdomains = PETSC_FALSE;
  osm->sub_mat_type  = NULL;

  /* install the PC operations implemented by this file */
  pc->data                 = (void *)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* compose the type-specific methods reached through PetscTryMethod/PetscUseMethod */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
  PetscFunctionReturn(0);
}
1315 
1316 /*@C
1317    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1318    preconditioner, `PCASM`,  for any problem on a general grid.
1319 
1320    Collective
1321 
1322    Input Parameters:
1323 +  A - The global matrix operator
1324 -  n - the number of local blocks
1325 
1326    Output Parameters:
1327 .  outis - the array of index sets defining the subdomains
1328 
1329    Level: advanced
1330 
1331    Note:
1332    This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
1333    from these if you use `PCASMSetLocalSubdomains()`
1334 
1335    Fortran Note:
1336    You must provide the array outis[] already allocated of length n.
1337 
1338 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1339 @*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  /* the local row range must be aligned with the matrix block size */
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    /* detect (S)BAIJ storage so row/index counts below can be treated as block counts */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* CURRENT and SQUARE are trivial partitioners; only use a real graph partitioner */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix; note MatCreateMPIAdj() takes
           ownership of iia and jja, so they must not be freed here */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* the caller owns this array; it is freed by PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs;
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      /* distribute the remainder (mbs % n) over the first subdomains, one extra block-row each */
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering: sorting the new numbering groups the
       rows of each partition together in indices[] */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb, &numbering));
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs constituent point indices */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(0);
}
1486 
1487 /*@C
1488    PCASMDestroySubdomains - Destroys the index sets created with
1489    `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1490 
1491    Collective
1492 
1493    Input Parameters:
1494 +  n - the number of index sets
1495 .  is - the array of index sets
1496 -  is_local - the array of local index sets, can be NULL
1497 
1498    Level: advanced
1499 
1500 .seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1501 @*/
1502 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1503 {
1504   PetscInt i;
1505 
1506   PetscFunctionBegin;
1507   if (n <= 0) PetscFunctionReturn(0);
1508   if (is) {
1509     PetscValidPointer(is, 2);
1510     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1511     PetscCall(PetscFree(is));
1512   }
1513   if (is_local) {
1514     PetscValidPointer(is_local, 3);
1515     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1516     PetscCall(PetscFree(is_local));
1517   }
1518   PetscFunctionReturn(0);
1519 }
1520 
1521 /*@
1522    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1523    preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1524 
1525    Not Collective
1526 
1527    Input Parameters:
1528 +  m   - the number of mesh points in the x direction
1529 .  n   - the number of mesh points in the y direction
1530 .  M   - the number of subdomains in the x direction
1531 .  N   - the number of subdomains in the y direction
1532 .  dof - degrees of freedom per node
1533 -  overlap - overlap in mesh lines
1534 
1535    Output Parameters:
1536 +  Nsub - the number of subdomains created
1537 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1538 -  is_local - array of index sets defining non-overlapping subdomains
1539 
1540    Note:
   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1542    preconditioners.  More general related routines are
1543    `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1544 
1545    Level: advanced
1546 
1547 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1548           `PCASMSetOverlap()`
1549 @*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0;
  /* Sweep the N x M grid of subdomains row by row; loc_outer is the flat subdomain index */
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    /* extend the y-range by the overlap and clip to the mesh boundary [0,n] */
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      /* extend the x-range by the overlap and clip to the mesh boundary [0,m] */
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      /* natural (row-major) mesh numbering: point (jj,ii) has global index m*ii + jj */
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        /* no overlap: the local subdomain equals the overlapping one, so share
           the IS and take an extra reference instead of rebuilding it */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx (the interior has loc <= nidx points) for the non-overlapping subdomain */
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(0);
}
1605 
1606 /*@C
1607     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1608     only) for the additive Schwarz preconditioner, `PCASM`.
1609 
1610     Not Collective
1611 
1612     Input Parameter:
1613 .   pc - the preconditioner context
1614 
1615     Output Parameters:
1616 +   n - if requested, the number of subdomains for this processor (default value = 1)
1617 .   is - if requested, the index sets that define the subdomains for this processor
1618 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1619 
1620     Note:
1621     The `IS` numbering is in the parallel, global numbering of the vector.
1622 
1623     Level: advanced
1624 
1625 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1626           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1627 @*/
1628 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1629 {
1630   PC_ASM   *osm = (PC_ASM *)pc->data;
1631   PetscBool match;
1632 
1633   PetscFunctionBegin;
1634   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1635   if (n) PetscValidIntPointer(n, 2);
1636   if (is) PetscValidPointer(is, 3);
1637   if (is_local) PetscValidPointer(is_local, 4);
1638   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1639   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1640   if (n) *n = osm->n_local_true;
1641   if (is) *is = osm->is;
1642   if (is_local) *is_local = osm->is_local;
1643   PetscFunctionReturn(0);
1644 }
1645 
1646 /*@C
1647     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1648     only) for the additive Schwarz preconditioner, `PCASM`.
1649 
1650     Not Collective
1651 
1652     Input Parameter:
1653 .   pc - the preconditioner context
1654 
1655     Output Parameters:
1656 +   n - if requested, the number of matrices for this processor (default value = 1)
1657 -   mat - if requested, the matrices
1658 
1659     Level: advanced
1660 
1661     Notes:
1662     Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`)
1663 
1664     Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1665 
1666 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1667           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1668 @*/
1669 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1670 {
1671   PC_ASM   *osm;
1672   PetscBool match;
1673 
1674   PetscFunctionBegin;
1675   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1676   if (n) PetscValidIntPointer(n, 2);
1677   if (mat) PetscValidPointer(mat, 3);
1678   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1679   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1680   if (!match) {
1681     if (n) *n = 0;
1682     if (mat) *mat = NULL;
1683   } else {
1684     osm = (PC_ASM *)pc->data;
1685     if (n) *n = osm->n_local_true;
1686     if (mat) *mat = osm->pmat;
1687   }
1688   PetscFunctionReturn(0);
1689 }
1690 
1691 /*@
1692     PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1693 
1694     Logically Collective
1695 
1696     Input Parameters:
1697 +   pc  - the preconditioner
1698 -   flg - boolean indicating whether to use subdomains defined by the `DM`
1699 
1700     Options Database Key:
1701 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1702 
1703     Level: intermediate
1704 
1705     Note:
1706     `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1707     so setting either of the first two effectively turns the latter off.
1708 
1709 .seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1710           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1711 @*/
1712 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1713 {
1714   PC_ASM   *osm = (PC_ASM *)pc->data;
1715   PetscBool match;
1716 
1717   PetscFunctionBegin;
1718   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1719   PetscValidLogicalCollectiveBool(pc, flg, 2);
1720   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1721   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1722   if (match) osm->dm_subdomains = flg;
1723   PetscFunctionReturn(0);
1724 }
1725 
1726 /*@
1727     PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1728 
1729     Not Collective
1730 
1731     Input Parameter:
1732 .   pc  - the preconditioner
1733 
1734     Output Parameter:
1735 .   flg - boolean indicating whether to use subdomains defined by the DM
1736 
1737     Level: intermediate
1738 
1739 .seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1740           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1741 @*/
1742 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1743 {
1744   PC_ASM   *osm = (PC_ASM *)pc->data;
1745   PetscBool match;
1746 
1747   PetscFunctionBegin;
1748   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1749   PetscValidBoolPointer(flg, 2);
1750   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1751   if (match) *flg = osm->dm_subdomains;
1752   else *flg = PETSC_FALSE;
1753   PetscFunctionReturn(0);
1754 }
1755 
1756 /*@
1757      PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1758 
1759    Not Collective
1760 
1761    Input Parameter:
1762 .  pc - the `PC`
1763 
1764    Output Parameter:
.  sub_mat_type - name of the matrix type used for the subsolves
1766 
1767    Level: advanced
1768 
1769 .seealso: `PCASM`, `PCASMSetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1770 @*/
PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCASMGetSubMatType_ASM() (composed as "PCASMGetSubMatType_C"
     above); PetscTryMethod() silently does nothing for other PC types, so
     *sub_mat_type is then left unchanged */
  PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
  PetscFunctionReturn(0);
}
1778 
1779 /*@
1780      PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1781 
1782    Collective on pc
1783 
1784    Input Parameters:
1785 +  pc             - the `PC` object
1786 -  sub_mat_type   - the `MatType`
1787 
1788    Options Database Key:
1789 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1790    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1791 
1792    Note:
1793    See "${PETSC_DIR}/include/petscmat.h" for available types
1794 
1795   Level: advanced
1796 
1797 .seealso: `PCASM`, `PCASMGetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1798 @*/
PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* Dispatches to PCASMSetSubMatType_ASM() (composed as "PCASMSetSubMatType_C"
     above); PetscTryMethod() silently does nothing for other PC types */
  PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
  PetscFunctionReturn(0);
}
1806