xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision e0f629dd96229b85b8d4fbf4a5ed4f197d232daa)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
/*
  PCView_ASM - View an additive Schwarz preconditioner.

  Handles two viewer types:
    - ASCII: prints a summary (block count, overlap amount, restriction/
      interpolation type). With format PETSC_VIEWER_ASCII_INFO_DETAIL every
      rank prints all of its block solvers; otherwise only the first block's
      solver on rank 0 is shown.
    - string: prints a one-line summary plus the first block's KSP.

  Collective on pc: the sub-viewer get/restore and synchronized-print
  push/flush/pop calls are collective, so all ranks must execute them in the
  same order even when only some ranks produce output.
*/
static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer) {
  PC_ASM           *osm = (PC_ASM *)pc->data;
  PetscMPIInt       rank;
  PetscInt          i, bsz;
  PetscBool         iascii, isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char       *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    /* Default texts cover the case where overlap/block counts are not yet determined (viewing before PCSetUp()) */
    char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
        /* GetSubViewer/RestoreSubViewer are collective: every rank calls them, but only rank 0 writes into the sub-viewer */
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(viewer));
          PetscCall(KSPView(osm->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(viewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      }
    } else {
      /* Detailed view: each rank reports its own block count, then views each of its block solvers */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i], &bsz));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
        PetscCall(KSPView(osm->ksp[i], sviewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    /* Compact single-line summary for string viewers */
    PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  }
  PetscFunctionReturn(0);
}
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc) {
80   PC_ASM         *osm = (PC_ASM *)pc->data;
81   const char     *prefix;
82   char            fname[PETSC_MAX_PATH_LEN + 1];
83   PetscViewer     viewer, sviewer;
84   char           *s;
85   PetscInt        i, j, nidx;
86   const PetscInt *idx;
87   PetscMPIInt     rank, size;
88 
89   PetscFunctionBegin;
90   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
91   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
92   PetscCall(PCGetOptionsPrefix(pc, &prefix));
93   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
94   if (fname[0] == 0) PetscCall(PetscStrcpy(fname, "stdout"));
95   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
96   for (i = 0; i < osm->n_local; i++) {
97     if (i < osm->n_local_true) {
98       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
99       PetscCall(ISGetIndices(osm->is[i], &idx));
100       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
101 #define len 16 * (nidx + 1) + 512
102       PetscCall(PetscMalloc1(len, &s));
103       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
104 #undef len
105       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
106       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
107       PetscCall(ISRestoreIndices(osm->is[i], &idx));
108       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
109       PetscCall(PetscViewerDestroy(&sviewer));
110       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
111       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
112       PetscCall(PetscViewerFlush(viewer));
113       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
114       PetscCall(PetscFree(s));
115       if (osm->is_local) {
116         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
117 #define len 16 * (nidx + 1) + 512
118         PetscCall(PetscMalloc1(len, &s));
119         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
120 #undef len
121         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
122         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
123         PetscCall(ISGetIndices(osm->is_local[i], &idx));
124         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
125         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
126         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
127         PetscCall(PetscViewerDestroy(&sviewer));
128         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
129         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
130         PetscCall(PetscViewerFlush(viewer));
131         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
132         PetscCall(PetscFree(s));
133       }
134     } else {
135       /* Participate in collective viewer calls. */
136       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
137       PetscCall(PetscViewerFlush(viewer));
138       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
139       /* Assume either all ranks have is_local or none do. */
140       if (osm->is_local) {
141         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
142         PetscCall(PetscViewerFlush(viewer));
143         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
144       }
145     }
146   }
147   PetscCall(PetscViewerFlush(viewer));
148   PetscCall(PetscViewerDestroy(&viewer));
149   PetscFunctionReturn(0);
150 }
151 
152 static PetscErrorCode PCSetUp_ASM(PC pc) {
153   PC_ASM     *osm = (PC_ASM *)pc->data;
154   PetscBool   flg;
155   PetscInt    i, m, m_local;
156   MatReuse    scall = MAT_REUSE_MATRIX;
157   IS          isl;
158   KSP         ksp;
159   PC          subpc;
160   const char *prefix, *pprefix;
161   Vec         vec;
162   DM         *domain_dm = NULL;
163 
164   PetscFunctionBegin;
165   if (!pc->setupcalled) {
166     PetscInt m;
167 
168     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
169     if (osm->n_local_true == PETSC_DECIDE) {
170       /* no subdomains given */
171       /* try pc->dm first, if allowed */
172       if (osm->dm_subdomains && pc->dm) {
173         PetscInt num_domains, d;
174         char   **domain_names;
175         IS      *inner_domain_is, *outer_domain_is;
176         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
177         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
178                               A future improvement of this code might allow one to use
179                               DM-defined subdomains and also increase the overlap,
180                               but that is not currently supported */
181         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
182         for (d = 0; d < num_domains; ++d) {
183           if (domain_names) PetscCall(PetscFree(domain_names[d]));
184           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
185           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
186         }
187         PetscCall(PetscFree(domain_names));
188         PetscCall(PetscFree(inner_domain_is));
189         PetscCall(PetscFree(outer_domain_is));
190       }
191       if (osm->n_local_true == PETSC_DECIDE) {
192         /* still no subdomains; use one subdomain per processor */
193         osm->n_local_true = 1;
194       }
195     }
196     { /* determine the global and max number of subdomains */
197       struct {
198         PetscInt max, sum;
199       } inwork, outwork;
200       PetscMPIInt size;
201 
202       inwork.max = osm->n_local_true;
203       inwork.sum = osm->n_local_true;
204       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
205       osm->n_local = outwork.max;
206       osm->n       = outwork.sum;
207 
208       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
209       if (outwork.max == 1 && outwork.sum == size) {
210         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
211         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
212       }
213     }
214     if (!osm->is) { /* create the index sets */
215       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
216     }
217     if (osm->n_local_true > 1 && !osm->is_local) {
218       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
219       for (i = 0; i < osm->n_local_true; i++) {
220         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
221           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
222           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
223         } else {
224           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
225           osm->is_local[i] = osm->is[i];
226         }
227       }
228     }
229     PetscCall(PCGetOptionsPrefix(pc, &prefix));
230     if (osm->overlap > 0) {
231       /* Extend the "overlapping" regions by a number of steps */
232       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
233     }
234     if (osm->sort_indices) {
235       for (i = 0; i < osm->n_local_true; i++) {
236         PetscCall(ISSort(osm->is[i]));
237         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
238       }
239     }
240     flg = PETSC_FALSE;
241     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
242     if (flg) PetscCall(PCASMPrintSubdomains(pc));
243     if (!osm->ksp) {
244       /* Create the local solvers */
245       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
246       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
247       for (i = 0; i < osm->n_local_true; i++) {
248         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
249         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
250         PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)ksp));
251         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
252         PetscCall(KSPSetType(ksp, KSPPREONLY));
253         PetscCall(KSPGetPC(ksp, &subpc));
254         PetscCall(PCGetOptionsPrefix(pc, &prefix));
255         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
256         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
257         if (domain_dm) {
258           PetscCall(KSPSetDM(ksp, domain_dm[i]));
259           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
260           PetscCall(DMDestroy(&domain_dm[i]));
261         }
262         osm->ksp[i] = ksp;
263       }
264       if (domain_dm) PetscCall(PetscFree(domain_dm));
265     }
266 
267     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
268     PetscCall(ISSortRemoveDups(osm->lis));
269     PetscCall(ISGetLocalSize(osm->lis, &m));
270 
271     scall = MAT_INITIAL_MATRIX;
272   } else {
273     /*
274        Destroy the blocks from the previous iteration
275     */
276     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
277       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
278       scall = MAT_INITIAL_MATRIX;
279     }
280   }
281 
282   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
283   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
284     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
285     scall = MAT_INITIAL_MATRIX;
286   }
287 
288   /*
289      Extract out the submatrices
290   */
291   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
292   if (scall == MAT_INITIAL_MATRIX) {
293     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
294     for (i = 0; i < osm->n_local_true; i++) {
295       PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)osm->pmat[i]));
296       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
297     }
298   }
299 
300   /* Convert the types of the submatrices (if needbe) */
301   if (osm->sub_mat_type) {
302     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
303   }
304 
305   if (!pc->setupcalled) {
306     VecType vtype;
307 
308     /* Create the local work vectors (from the local matrices) and scatter contexts */
309     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
310 
311     PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
312     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
313     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
314     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
315     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
316 
317     PetscCall(ISGetLocalSize(osm->lis, &m));
318     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
319     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
320     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
321     PetscCall(VecSetSizes(osm->lx, m, m));
322     PetscCall(VecSetType(osm->lx, vtype));
323     PetscCall(VecDuplicate(osm->lx, &osm->ly));
324     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
325     PetscCall(ISDestroy(&isl));
326 
327     for (i = 0; i < osm->n_local_true; ++i) {
328       ISLocalToGlobalMapping ltog;
329       IS                     isll;
330       const PetscInt        *idx_is;
331       PetscInt              *idx_lis, nout;
332 
333       PetscCall(ISGetLocalSize(osm->is[i], &m));
334       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
335       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
336 
337       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
338       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
339       PetscCall(ISGetLocalSize(osm->is[i], &m));
340       PetscCall(ISGetIndices(osm->is[i], &idx_is));
341       PetscCall(PetscMalloc1(m, &idx_lis));
342       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
343       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
344       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
345       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
346       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
347       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
348       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
349       PetscCall(ISDestroy(&isll));
350       PetscCall(ISDestroy(&isl));
351       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
352         ISLocalToGlobalMapping ltog;
353         IS                     isll, isll_local;
354         const PetscInt        *idx_local;
355         PetscInt              *idx1, *idx2, nout;
356 
357         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
358         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
359 
360         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
361         PetscCall(PetscMalloc1(m_local, &idx1));
362         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
363         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
364         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
365         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
366 
367         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
368         PetscCall(PetscMalloc1(m_local, &idx2));
369         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
370         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
371         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
372         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
373 
374         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
375         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
376 
377         PetscCall(ISDestroy(&isll));
378         PetscCall(ISDestroy(&isll_local));
379       }
380     }
381     PetscCall(VecDestroy(&vec));
382   }
383 
384   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
385     IS      *cis;
386     PetscInt c;
387 
388     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
389     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
390     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
391     PetscCall(PetscFree(cis));
392   }
393 
394   /* Return control to the user so that the submatrices can be modified (e.g., to apply
395      different boundary conditions for the submatrices than for the global problem) */
396   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
397 
398   /*
399      Loop over subdomains putting them into local ksp
400   */
401   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
402   for (i = 0; i < osm->n_local_true; i++) {
403     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
404     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
405     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
406   }
407   PetscFunctionReturn(0);
408 }
409 
410 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc) {
411   PC_ASM            *osm = (PC_ASM *)pc->data;
412   PetscInt           i;
413   KSPConvergedReason reason;
414 
415   PetscFunctionBegin;
416   for (i = 0; i < osm->n_local_true; i++) {
417     PetscCall(KSPSetUp(osm->ksp[i]));
418     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
419     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
420   }
421   PetscFunctionReturn(0);
422 }
423 
/*
  PCApply_ASM - Apply the additive Schwarz preconditioner: y = M^{-1} x.

  The application is: scatter the global RHS x into the local union vector
  lx, restrict to each block RHS x[i], solve each block with its KSP into
  y[i], accumulate the block solutions into the local union solution ly,
  and finally scatter-add ly back into the global y.

  PC_ASM_RESTRICT / PC_ASM_INTERPOLATE control whether the forward
  (restriction) and reverse (interpolation) scatters exchange off-process
  values or stay purely local. For PC_COMPOSITE_MULTIPLICATIVE each block's
  RHS is corrected with the solutions of the preceding blocks before solving.
*/
static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(VecSet(y, 0.0));
    PetscCall(VecSet(osm->ly, 0.0));

    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      /* solve the overlapping i-block */
      PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
      PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
      PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
      PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping i-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      }

      if (i < n_local_true - 1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1] * ly (Gauss-Seidel-like correction) */
          PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
          PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
488 
/*
  PCMatApply_ASM - Apply the preconditioner to a block of vectors: Y = M^{-1} X,
  where X and Y are dense matrices whose columns are the right-hand sides and
  solutions.

  Only the single-local-block case (n_local_true <= 1) is implemented: each
  column of X is scattered into the 0-block RHS (collected in the dense work
  matrix Z), all columns are solved at once with KSPMatSolve() into W, and
  each column of W is scattered back into the corresponding column of Y.

  The PC_ASM_RESTRICT / PC_ASM_INTERPOLATE handling of the forward/reverse
  scatter modes mirrors PCApply_ASM().
*/
static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  Mat         Z, W;
  Vec         x;
  PetscInt    i, m, N;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
  /* Z holds the 0-block RHS for every column of X: local block size m by N columns */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(MatZeroEntries(Y));
    PetscCall(VecSet(osm->ly, 0.0));

    for (i = 0; i < N; ++i) {
      PetscCall(MatDenseGetColumnVecRead(X, i, &x));
      /* copy the global RHS to local RHS including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
      /* restrict local RHS to the overlapping 0-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
    }
    PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
    /* solve the overlapping 0-block; all N right-hand sides in one call */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(MatDestroy(&Z));

    for (i = 0; i < N; ++i) {
      /* ly is reused per column, so it must be re-zeroed each iteration */
      PetscCall(VecSet(osm->ly, 0.0));
      PetscCall(MatDenseGetColumnVecRead(W, i, &x));
      if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping 0-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      }
      PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
      /* add the local solution to the global solution including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
    }
    PetscCall(MatDestroy(&W));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
559 
/*
  PCApplyTranspose_ASM - Apply the transpose of the additive Schwarz
  preconditioner: y = M^{-T} x.

  Structurally identical to PCApply_ASM() (restrict, block solves,
  interpolate), but the block solves use KSPSolveTranspose() and the
  RESTRICT/INTERPOLATE roles of the forward and reverse scatters are swapped,
  since transposing the operator exchanges the restriction and interpolation
  terms.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y) {
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block (transposed system) */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(0);
}
620 
/*
   PCReset_ASM - Frees everything built by PCSetUp_ASM() (submatrices, scatters,
   work vectors, subdomain index sets) while keeping the KSP array and the
   PC_ASM context itself alive, so the PC can be set up again.
*/
static PetscErrorCode PCReset_ASM(PC pc) {
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* Reset, but do not destroy, the subdomain solvers so they can be reused by a later PCSetUp() */
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
  }
  /* Release the subdomain submatrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
  }
  /* NOTE(review): lrestriction gates destruction of restriction, lprolongation,
     x and y — presumably these are all created together during setup; confirm
     against PCSetUp_ASM() */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i = 0; i < osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));
  }
  PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* lmats only exists for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}
657 
658 static PetscErrorCode PCDestroy_ASM(PC pc) {
659   PC_ASM  *osm = (PC_ASM *)pc->data;
660   PetscInt i;
661 
662   PetscFunctionBegin;
663   PetscCall(PCReset_ASM(pc));
664   if (osm->ksp) {
665     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
666     PetscCall(PetscFree(osm->ksp));
667   }
668   PetscCall(PetscFree(pc->data));
669 
670   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
671   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
672   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
673   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
674   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
675   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
676   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
677   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
681   PetscFunctionReturn(0);
682 }
683 
684 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject) {
685   PC_ASM         *osm = (PC_ASM *)pc->data;
686   PetscInt        blocks, ovl;
687   PetscBool       flg;
688   PCASMType       asmtype;
689   PCCompositeType loctype;
690   char            sub_mat_type[256];
691 
692   PetscFunctionBegin;
693   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
694   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
695   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
696   if (flg) {
697     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
698     osm->dm_subdomains = PETSC_FALSE;
699   }
700   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
701   if (flg) {
702     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
703     osm->dm_subdomains = PETSC_FALSE;
704   }
705   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
706   if (flg) {
707     PetscCall(PCASMSetOverlap(pc, ovl));
708     osm->dm_subdomains = PETSC_FALSE;
709   }
710   flg = PETSC_FALSE;
711   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
712   if (flg) PetscCall(PCASMSetType(pc, asmtype));
713   flg = PETSC_FALSE;
714   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
715   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
716   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
717   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
718   PetscOptionsHeadEnd();
719   PetscFunctionReturn(0);
720 }
721 
/*
   PCASMSetLocalSubdomains_ASM - Records user-supplied subdomain index sets for
   this process. Must be called before PCSetUp(); after setup only a redundant
   call with the same n and no index sets is permitted.
*/
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[]) {
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the old ones, so the
       call remains safe if the caller passes back the currently held sets */
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* Only non-overlapping sets were given: derive osm->is from them */
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            /* no overlap to add, so share the IS instead of copying it */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
767 
768 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local) {
769   PC_ASM     *osm = (PC_ASM *)pc->data;
770   PetscMPIInt rank, size;
771   PetscInt    n;
772 
773   PetscFunctionBegin;
774   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
775   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
776 
777   /*
778      Split the subdomains equally among all processors
779   */
780   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
781   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
782   n = N / size + ((N % size) > rank);
783   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
784   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
785   if (!pc->setupcalled) {
786     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
787 
788     osm->n_local_true = n;
789     osm->is           = NULL;
790     osm->is_local     = NULL;
791   }
792   PetscFunctionReturn(0);
793 }
794 
/* Stores the requested overlap; only honored before PCSetUp() has been called */
static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
  PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
  /* after setup the value is frozen; the check above already rejected any change */
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}
804 
/* Records the restriction/interpolation variant chosen by the user */
static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE; /* mark the type as user-chosen -- presumably so later code does not override it with a default; confirm against PCSetUp_ASM() */
  PetscFunctionReturn(0);
}
813 
/* Returns the current restriction/interpolation variant */
static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}
821 
/* Sets how the local subdomain solutions are combined; only additive and
   multiplicative composition are implemented */
static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}
830 
/* Returns the current local composition type (additive or multiplicative) */
static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
838 
/* Records whether the subdomain index sets should be sorted */
static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
846 
847 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp) {
848   PC_ASM *osm = (PC_ASM *)pc->data;
849 
850   PetscFunctionBegin;
851   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
852 
853   if (n_local) *n_local = osm->n_local_true;
854   if (first_local) {
855     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
856     *first_local -= osm->n_local_true;
857   }
858   if (ksp) *ksp = osm->ksp;
859   PetscFunctionReturn(0);
860 }
861 
/* Returns the stored subdomain matrix type string (may be NULL if never set) */
static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(sub_mat_type, 2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}
871 
/* Replaces the stored subdomain matrix type; frees any previous value and keeps
   a private copy of the string */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type) {
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscFree(osm->sub_mat_type));
  PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
  PetscFunctionReturn(0);
}
881 
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.

    Collective on pc

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_local_blocks <blks> - Sets the number of local blocks

    Notes:
    The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the `PCASM` preconditioner uses 1 block per processor.

    Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.

    If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method)

    If the is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
    no code to handle that case.

    Level: advanced

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed onto this PC, if any (no-op for other PC types) */
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(0);
}
924 
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner, `PCASM`.

    Collective on pc, all MPI ranks must pass in the same array of `IS`

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets the total number of blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is or is_local.

    By default the `PCASM` preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the `PCASM` preconditioner is being used.

    Use `PCASMSetLocalSubdomains()` to set local subdomains.

    The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCGASM`
@*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the implementation composed onto this PC, if any (no-op for other PC types) */
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(0);
}
967 
968 /*@
969     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
970     additive Schwarz preconditioner, `PCASM`.
971 
972     Logically Collective on pc
973 
974     Input Parameters:
975 +   pc  - the preconditioner context
976 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
977 
978     Options Database Key:
979 .   -pc_asm_overlap <ovl> - Sets overlap
980 
981     Notes:
982     By default the `PCASM` preconditioner uses 1 block per processor.  To use
983     multiple blocks per perocessor, see `PCASMSetTotalSubdomains()` and
984     `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
985 
986     The overlap defaults to 1, so if one desires that no additional
987     overlap be computed beyond what may have been set with a call to
988     `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
989     must be set to be 0.  In particular, if one does not explicitly set
990     the subdomains an application code, then all overlap would be computed
991     internally by PETSc, and using an overlap of 0 would result in an `PCASM`
992     variant that is equivalent to the block Jacobi preconditioner.
993 
994     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
995     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
996 
997     Note that one can define initial index sets with any overlap via
998     `PCASMSetLocalSubdomains()`; the routine
999     `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1000     if desired.
1001 
1002     Level: intermediate
1003 
1004 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1005           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1006 @*/
1007 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl) {
1008   PetscFunctionBegin;
1009   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1010   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1011   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1012   PetscFunctionReturn(0);
1013 }
1014 
1015 /*@
1016     PCASMSetType - Sets the type of restriction and interpolation used
1017     for local problems in the additive Schwarz method, `PCASM`.
1018 
1019     Logically Collective on pc
1020 
1021     Input Parameters:
1022 +   pc  - the preconditioner context
1023 -   type - variant of `PCASM`, one of
1024 .vb
1025       PC_ASM_BASIC       - full interpolation and restriction
1026       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1027       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1028       PC_ASM_NONE        - local processor restriction and interpolation
1029 .ve
1030 
1031     Options Database Key:
1032 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1033 
1034     Note:
1035     if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1036     to limit the local processor interpolation
1037 
1038     Level: intermediate
1039 
1040 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1041           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1042 @*/
1043 PetscErrorCode PCASMSetType(PC pc, PCASMType type) {
1044   PetscFunctionBegin;
1045   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1046   PetscValidLogicalCollectiveEnum(pc, type, 2);
1047   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1048   PetscFunctionReturn(0);
1049 }
1050 
1051 /*@
1052     PCASMGetType - Gets the type of restriction and interpolation used
1053     for local problems in the additive Schwarz method, `PCASM`.
1054 
1055     Logically Collective on pc
1056 
1057     Input Parameter:
1058 .   pc  - the preconditioner context
1059 
1060     Output Parameter:
1061 .   type - variant of `PCASM`, one of
1062 
1063 .vb
1064       PC_ASM_BASIC       - full interpolation and restriction
1065       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1066       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1067       PC_ASM_NONE        - local processor restriction and interpolation
1068 .ve
1069 
1070     Options Database Key:
1071 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1072 
1073     Level: intermediate
1074 
1075 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1076           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1077 @*/
1078 PetscErrorCode PCASMGetType(PC pc, PCASMType *type) {
1079   PetscFunctionBegin;
1080   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1081   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1082   PetscFunctionReturn(0);
1083 }
1084 
1085 /*@
1086   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1087 
1088   Logically Collective on pc
1089 
1090   Input Parameters:
1091 + pc  - the preconditioner context
1092 - type - type of composition, one of
1093 .vb
1094   PC_COMPOSITE_ADDITIVE       - local additive combination
1095   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1096 .ve
1097 
1098   Options Database Key:
1099 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1100 
1101   Level: intermediate
1102 
1103 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASM`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType`
1104 @*/
1105 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type) {
1106   PetscFunctionBegin;
1107   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1108   PetscValidLogicalCollectiveEnum(pc, type, 2);
1109   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1110   PetscFunctionReturn(0);
1111 }
1112 
1113 /*@
1114   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1115 
1116   Logically Collective on pc
1117 
1118   Input Parameter:
1119 . pc  - the preconditioner context
1120 
1121   Output Parameter:
1122 . type - type of composition, one of
1123 .vb
1124   PC_COMPOSITE_ADDITIVE       - local additive combination
1125   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1126 .ve
1127 
1128   Options Database Key:
1129 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1130 
1131   Level: intermediate
1132 
1133 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType`
1134 @*/
1135 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type) {
1136   PetscFunctionBegin;
1137   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1138   PetscValidPointer(type, 2);
1139   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1140   PetscFunctionReturn(0);
1141 }
1142 
1143 /*@
1144     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1145 
1146     Logically Collective on pc
1147 
1148     Input Parameters:
1149 +   pc  - the preconditioner context
1150 -   doSort - sort the subdomain indices
1151 
1152     Level: intermediate
1153 
1154 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1155           `PCASMCreateSubdomains2D()`
1156 @*/
1157 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort) {
1158   PetscFunctionBegin;
1159   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1160   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1161   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1162   PetscFunctionReturn(0);
1163 }
1164 
/*@C
   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
   this processor.

   Collective on pc iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of `KSP` contexts

   Notes:
   After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.

   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.

   Fortran Note:
   The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.

   Level: advanced

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod: errors if this PC is not a PCASM (unlike PetscTryMethod) */
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(0);
}
1199 
1200 /*MC
1201    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1202            its own `KSP` object.
1203 
1204    Options Database Keys:
1205 +  -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank.
1206 .  -pc_asm_overlap <ovl> - Sets overlap
1207 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1208 -  -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1209 
1210    Level: beginner
1211 
1212    Notes:
1213    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1214    will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1215     -pc_asm_type basic to get the same convergence behavior
1216 
1217    Each processor can have one or more blocks, but a block cannot be shared by more
1218    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1219 
1220    To set options on the solvers for each block append -sub_ to all the `KSP`, and `PC`
1221    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1222 
1223    To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
1224    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1225 
1226     References:
1227 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1228      Courant Institute, New York University Technical report
1229 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1230     Cambridge University Press.
1231 
1232 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
          `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1234           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1235 M*/
1236 
/*
   PCCreate_ASM - Allocates and initializes the PC_ASM context, installs the
   PCASM operation table entries on the PC, and composes the PCASM-specific
   methods so the public PCASMSet/Get routines can dispatch to them.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc) {
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc, &osm));

  /* defaults: one block per process, overlap 1, restricted additive Schwarz */
  osm->n             = PETSC_DECIDE;
  osm->n_local       = 0;
  osm->n_local_true  = PETSC_DECIDE;
  osm->overlap       = 1;
  osm->ksp           = NULL;
  osm->restriction   = NULL;
  osm->lprolongation = NULL;
  osm->lrestriction  = NULL;
  osm->x             = NULL;
  osm->y             = NULL;
  osm->is            = NULL;
  osm->is_local      = NULL;
  osm->mat           = NULL;
  osm->pmat          = NULL;
  osm->type          = PC_ASM_RESTRICT;
  osm->loctype       = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices  = PETSC_TRUE;
  osm->dm_subdomains = PETSC_FALSE;
  osm->sub_mat_type  = NULL;

  /* install the PC operation table */
  pc->data                 = (void *)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* compose the type-specific methods; removed again in PCDestroy_ASM() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
  PetscFunctionReturn(0);
}
1288 
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner, `PCASM`,  for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameter:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note:
   This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
   from these if you use `PCASMSetLocalSubdomains()`

   Fortran Note:
   You must provide the array outis[] already allocated of length n.

.seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
@*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[]) {
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  /* the local row range must be aligned to the block size, otherwise block subdomains make no sense */
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    /* detect (S)BAIJ so the row IJ can be taken block-compressed below */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* CURRENT and SQUARE are trivial partitioners; only proceed with a "real" one */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE: MatCreateMPIAdj() takes ownership of iia and jja, which is why they
           are never freed in this function (they go away with adj) */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      /* must be paired with the MatGetRowIJ() above, using the same flags */
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs; /* number of local blocks */
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      /* distribute the remainder blocks (mbs % n) over the first subdomains */
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    /* sort the identity permutation by the new numbering so that, after the sort,
       indices[] groups the original node indices by partition */
    PetscCall(ISGetIndices(isnumb, &numbering));
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(0);
}
1458 
1459 /*@C
1460    PCASMDestroySubdomains - Destroys the index sets created with
1461    `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1462 
1463    Collective
1464 
1465    Input Parameters:
1466 +  n - the number of index sets
1467 .  is - the array of index sets
1468 -  is_local - the array of local index sets, can be NULL
1469 
1470    Level: advanced
1471 
1472 .seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1473 @*/
1474 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[]) {
1475   PetscInt i;
1476 
1477   PetscFunctionBegin;
1478   if (n <= 0) PetscFunctionReturn(0);
1479   if (is) {
1480     PetscValidPointer(is, 2);
1481     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1482     PetscCall(PetscFree(is));
1483   }
1484   if (is_local) {
1485     PetscValidPointer(is_local, 3);
1486     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1487     PetscCall(PetscFree(is_local));
1488   }
1489   PetscFunctionReturn(0);
1490 }
1491 
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m   - the number of mesh points in the x direction
.  n   - the number of mesh points in the y direction
.  M   - the number of subdomains in the x direction
.  N   - the number of subdomains in the y direction
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
   preconditioners.  More general related routines are
   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.

   Level: advanced

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMSetOverlap()`
@*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local) {
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0; /* linear index of the current subdomain, row-major over the M x N grid of subdomains */
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    /* extend the subdomain by 'overlap' mesh lines in y, clipped to the mesh */
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      /* extend the subdomain by 'overlap' mesh lines in x, clipped to the mesh */
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      /* gather the (natural-ordering) indices of the overlapping rectangle */
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        /* with no overlap the local subdomain equals the overlapping one; share the IS */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* rebuild idx with only the non-overlapping rectangle for is_local */
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(0);
}
1575 
1576 /*@C
1577     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1578     only) for the additive Schwarz preconditioner, `PCASM`.
1579 
1580     Not Collective
1581 
1582     Input Parameter:
1583 .   pc - the preconditioner context
1584 
1585     Output Parameters:
1586 +   n - if requested, the number of subdomains for this processor (default value = 1)
1587 .   is - if requested, the index sets that define the subdomains for this processor
1588 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1589 
1590     Note:
1591     The `IS` numbering is in the parallel, global numbering of the vector.
1592 
1593     Level: advanced
1594 
1595 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1596           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1597 @*/
1598 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[]) {
1599   PC_ASM   *osm = (PC_ASM *)pc->data;
1600   PetscBool match;
1601 
1602   PetscFunctionBegin;
1603   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1604   if (n) PetscValidIntPointer(n, 2);
1605   if (is) PetscValidPointer(is, 3);
1606   if (is_local) PetscValidPointer(is_local, 4);
1607   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1608   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1609   if (n) *n = osm->n_local_true;
1610   if (is) *is = osm->is;
1611   if (is_local) *is_local = osm->is_local;
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 /*@C
1616     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1617     only) for the additive Schwarz preconditioner, `PCASM`.
1618 
1619     Not Collective
1620 
1621     Input Parameter:
1622 .   pc - the preconditioner context
1623 
1624     Output Parameters:
1625 +   n - if requested, the number of matrices for this processor (default value = 1)
1626 -   mat - if requested, the matrices
1627 
1628     Level: advanced
1629 
1630     Notes:
1631     Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`)
1632 
1633     Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1634 
1635 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1636           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1637 @*/
1638 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[]) {
1639   PC_ASM   *osm;
1640   PetscBool match;
1641 
1642   PetscFunctionBegin;
1643   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1644   if (n) PetscValidIntPointer(n, 2);
1645   if (mat) PetscValidPointer(mat, 3);
1646   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1647   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1648   if (!match) {
1649     if (n) *n = 0;
1650     if (mat) *mat = NULL;
1651   } else {
1652     osm = (PC_ASM *)pc->data;
1653     if (n) *n = osm->n_local_true;
1654     if (mat) *mat = osm->pmat;
1655   }
1656   PetscFunctionReturn(0);
1657 }
1658 
1659 /*@
1660     PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1661 
1662     Logically Collective
1663 
1664     Input Parameters:
1665 +   pc  - the preconditioner
1666 -   flg - boolean indicating whether to use subdomains defined by the `DM`
1667 
1668     Options Database Key:
1669 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1670 
1671     Level: intermediate
1672 
1673     Note:
1674     `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1675     so setting either of the first two effectively turns the latter off.
1676 
1677 .seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1678           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1679 @*/
1680 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg) {
1681   PC_ASM   *osm = (PC_ASM *)pc->data;
1682   PetscBool match;
1683 
1684   PetscFunctionBegin;
1685   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1686   PetscValidLogicalCollectiveBool(pc, flg, 2);
1687   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1688   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1689   if (match) osm->dm_subdomains = flg;
1690   PetscFunctionReturn(0);
1691 }
1692 
1693 /*@
1694     PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1695 
1696     Not Collective
1697 
1698     Input Parameter:
1699 .   pc  - the preconditioner
1700 
1701     Output Parameter:
1702 .   flg - boolean indicating whether to use subdomains defined by the DM
1703 
1704     Level: intermediate
1705 
1706 .seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1707           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1708 @*/
1709 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg) {
1710   PC_ASM   *osm = (PC_ASM *)pc->data;
1711   PetscBool match;
1712 
1713   PetscFunctionBegin;
1714   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1715   PetscValidBoolPointer(flg, 2);
1716   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1717   if (match) *flg = osm->dm_subdomains;
1718   else *flg = PETSC_FALSE;
1719   PetscFunctionReturn(0);
1720 }
1721 
/*@
     PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.

   Not Collective

   Input Parameter:
.  pc - the `PC`

   Output Parameter:
.  sub_mat_type - name of matrix type

   Level: advanced

.seealso: `PCASM`, `PCASMSetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
@*/
PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatches to PCASMGetSubMatType_ASM() if the PC is a PCASM; a no-op otherwise */
  PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
  PetscFunctionReturn(0);
}
1743 
/*@
     PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves

   Collective on pc

   Input Parameters:
+  pc             - the `PC` object
-  sub_mat_type   - the `MatType`

   Options Database Key:
.  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
   If you specify a base name like aijviennacl, the corresponding sequential type is assumed.

   Note:
   See "${PETSC_DIR}/include/petscmat.h" for available types

  Level: advanced

.seealso: `PCASM`, `PCASMGetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
@*/
PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatches to PCASMSetSubMatType_ASM() if the PC is a PCASM; a no-op otherwise */
  PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
  PetscFunctionReturn(0);
}
1770