xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision cb5db2414029547a5ccf00a1710ee072432a08af)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
/*
  PCView_ASM - Viewer routine for the additive Schwarz preconditioner.

  For an ASCII viewer: prints the total block count, the overlap, the
  restriction/interpolation type, and (if non-default) the local composition
  type.  With the default format only rank 0 shows the first block's KSP;
  with PETSC_VIEWER_ASCII_INFO_DETAIL every rank prints each of its local
  block solvers through a synchronized per-rank sub-viewer.

  For a string viewer: emits a one-line summary and views the first block KSP.

  Collective on the viewer's communicator (Get/RestoreSubViewer and the
  synchronized prints must be reached by all ranks).
*/
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         iascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (iascii) {
     /* Default strings are overwritten below once overlap/block counts are known */
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
     /* Brief format: only rank 0 views the first block's solver */
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(viewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(viewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
       /* Detailed format: every rank prints all its local blocks via a
          synchronized sub-viewer so the output stays ordered by rank */
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerFlush(viewer));
69       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
70     }
71   } else if (isstring) {
72     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
73     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
74     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
75     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
76   }
77   PetscFunctionReturn(PETSC_SUCCESS);
78 }
79 
80 static PetscErrorCode PCASMPrintSubdomains(PC pc)
81 {
82   PC_ASM         *osm = (PC_ASM *)pc->data;
83   const char     *prefix;
84   char            fname[PETSC_MAX_PATH_LEN + 1];
85   PetscViewer     viewer, sviewer;
86   char           *s;
87   PetscInt        i, j, nidx;
88   const PetscInt *idx;
89   PetscMPIInt     rank, size;
90 
91   PetscFunctionBegin;
92   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
93   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
94   PetscCall(PCGetOptionsPrefix(pc, &prefix));
95   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
96   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
97   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
98   for (i = 0; i < osm->n_local; i++) {
99     if (i < osm->n_local_true) {
100       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
101       PetscCall(ISGetIndices(osm->is[i], &idx));
102       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
103 #define len 16 * (nidx + 1) + 512
104       PetscCall(PetscMalloc1(len, &s));
105       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
106 #undef len
107       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
108       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
109       PetscCall(ISRestoreIndices(osm->is[i], &idx));
110       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
111       PetscCall(PetscViewerDestroy(&sviewer));
112       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
113       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
114       PetscCall(PetscViewerFlush(viewer));
115       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
116       PetscCall(PetscFree(s));
117       if (osm->is_local) {
118         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
119 #define len 16 * (nidx + 1) + 512
120         PetscCall(PetscMalloc1(len, &s));
121         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
122 #undef len
123         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
124         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
125         PetscCall(ISGetIndices(osm->is_local[i], &idx));
126         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
127         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
128         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
129         PetscCall(PetscViewerDestroy(&sviewer));
130         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
131         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
132         PetscCall(PetscViewerFlush(viewer));
133         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
134         PetscCall(PetscFree(s));
135       }
136     } else {
137       /* Participate in collective viewer calls. */
138       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
139       PetscCall(PetscViewerFlush(viewer));
140       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
141       /* Assume either all ranks have is_local or none do. */
142       if (osm->is_local) {
143         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
144         PetscCall(PetscViewerFlush(viewer));
145         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
146       }
147     }
148   }
149   PetscCall(PetscViewerFlush(viewer));
150   PetscCall(PetscViewerDestroy(&viewer));
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
/*
  PCSetUp_ASM - Builds the data structures for the additive Schwarz method.

  First call (!pc->setupcalled):
    - determine the subdomains: from the DM decomposition if allowed, from
      user-set subdomains, or default to one subdomain per rank;
    - compute the global total (osm->n) and per-rank maximum (osm->n_local)
      subdomain counts via a combined max/sum reduction;
    - grow the overlapping index sets by osm->overlap via MatIncreaseOverlap();
    - create one sequential KSP per local block (prefix "<pc prefix>sub_");
    - build osm->lis (sorted, de-duplicated union of the local overlapping
      subdomains), the local work vectors, the global->local restriction
      scatter, and per-block restriction/prolongation scatters.

  Every call: extract the overlapping submatrices with MatCreateSubMatrices()
  (MAT_REUSE_MATRIX when the nonzero pattern is unchanged), optionally convert
  them to osm->sub_mat_type, let the user modify them through
  PCModifySubMatrices(), and hand them to the block KSPs.
*/
154 static PetscErrorCode PCSetUp_ASM(PC pc)
155 {
156   PC_ASM     *osm = (PC_ASM *)pc->data;
157   PetscBool   flg;
158   PetscInt    i, m, m_local;
159   MatReuse    scall = MAT_REUSE_MATRIX;
160   IS          isl;
161   KSP         ksp;
162   PC          subpc;
163   const char *prefix, *pprefix;
164   Vec         vec;
165   DM         *domain_dm = NULL;
166 
167   PetscFunctionBegin;
168   if (!pc->setupcalled) {
169     PetscInt m;
    /* NOTE(review): this inner m shadows the function-scope m declared above;
       only the inner one receives the ISGetLocalSize(osm->lis,...) result below */
170 
171     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172     if (osm->n_local_true == PETSC_DECIDE) {
173       /* no subdomains given */
174       /* try pc->dm first, if allowed */
175       if (osm->dm_subdomains && pc->dm) {
176         PetscInt num_domains, d;
177         char   **domain_names;
178         IS      *inner_domain_is, *outer_domain_is;
179         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181                               A future improvement of this code might allow one to use
182                               DM-defined subdomains and also increase the overlap,
183                               but that is not currently supported */
184         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        /* PCASMSetLocalSubdomains() took its own references; release ours */
185         for (d = 0; d < num_domains; ++d) {
186           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189         }
190         PetscCall(PetscFree(domain_names));
191         PetscCall(PetscFree(inner_domain_is));
192         PetscCall(PetscFree(outer_domain_is));
193       }
194       if (osm->n_local_true == PETSC_DECIDE) {
195         /* still no subdomains; use one subdomain per processor */
196         osm->n_local_true = 1;
197       }
198     }
199     { /* determine the global and max number of subdomains */
200       struct {
201         PetscInt max, sum;
202       } inwork, outwork;
203       PetscMPIInt size;
204 
205       inwork.max = osm->n_local_true;
206       inwork.sum = osm->n_local_true;
207       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208       osm->n_local = outwork.max;
209       osm->n       = outwork.sum;
210 
211       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212       if (outwork.max == 1 && outwork.sum == size) {
213         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
214         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215       }
216     }
217     if (!osm->is) { /* create the index sets */
218       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219     }
    /* Keep a copy of the non-overlapping subdomains before MatIncreaseOverlap()
       modifies osm->is; with zero overlap the two can share the same IS */
220     if (osm->n_local_true > 1 && !osm->is_local) {
221       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222       for (i = 0; i < osm->n_local_true; i++) {
223         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226         } else {
227           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228           osm->is_local[i] = osm->is[i];
229         }
230       }
231     }
232     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233     if (osm->overlap > 0) {
234       /* Extend the "overlapping" regions by a number of steps */
235       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236     }
237     if (osm->sort_indices) {
238       for (i = 0; i < osm->n_local_true; i++) {
239         PetscCall(ISSort(osm->is[i]));
240         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241       }
242     }
243     flg = PETSC_FALSE;
244     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246     if (!osm->ksp) {
247       /* Create the local solvers */
248       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250       for (i = 0; i < osm->n_local_true; i++) {
251         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
253         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
254         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
255         PetscCall(KSPSetType(ksp, KSPPREONLY));
256         PetscCall(KSPGetPC(ksp, &subpc));
257         PetscCall(PCGetOptionsPrefix(pc, &prefix));
258         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
259         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
260         if (domain_dm) {
261           PetscCall(KSPSetDM(ksp, domain_dm[i]));
262           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
263           PetscCall(DMDestroy(&domain_dm[i]));
264         }
265         osm->ksp[i] = ksp;
266       }
267       if (domain_dm) PetscCall(PetscFree(domain_dm));
268     }
269 
    /* lis = sorted union (duplicates removed) of all local overlapping subdomains */
270     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
271     PetscCall(ISSortRemoveDups(osm->lis));
272     PetscCall(ISGetLocalSize(osm->lis, &m));
273 
274     scall = MAT_INITIAL_MATRIX;
275   } else {
276     /*
277        Destroy the blocks from the previous iteration
278     */
279     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
280       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
281       scall = MAT_INITIAL_MATRIX;
282     }
283   }
284 
285   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
286   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
287     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
288     scall = MAT_INITIAL_MATRIX;
289   }
290 
291   /*
292      Extract out the submatrices
293   */
294   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
295   if (scall == MAT_INITIAL_MATRIX) {
296     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
297     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
298   }
299 
300   /* Convert the types of the submatrices (if needbe) */
301   if (osm->sub_mat_type) {
302     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
303   }
304 
305   if (!pc->setupcalled) {
306     VecType vtype;
307 
308     /* Create the local work vectors (from the local matrices) and scatter contexts */
309     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
310 
311     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
    /* lprolongation is only needed when the non-overlapping subdomains differ
       from the overlapping ones (restrictive variants, additive composition) */
312     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
313     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
314     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
315     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
316 
317     PetscCall(ISGetLocalSize(osm->lis, &m));
318     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
319     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
320     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
321     PetscCall(VecSetSizes(osm->lx, m, m));
322     PetscCall(VecSetType(osm->lx, vtype));
323     PetscCall(VecDuplicate(osm->lx, &osm->ly));
    /* restriction: global vector -> concatenated local work vector lx */
324     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
325     PetscCall(ISDestroy(&isl));
326 
327     for (i = 0; i < osm->n_local_true; ++i) {
328       ISLocalToGlobalMapping ltog;
329       IS                     isll;
330       const PetscInt        *idx_is;
331       PetscInt              *idx_lis, nout;
332 
333       PetscCall(ISGetLocalSize(osm->is[i], &m));
334       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
335       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
336 
337       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
338       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
339       PetscCall(ISGetLocalSize(osm->is[i], &m));
340       PetscCall(ISGetIndices(osm->is[i], &idx_is));
341       PetscCall(PetscMalloc1(m, &idx_lis));
342       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
343       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
344       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
345       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
346       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
347       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
348       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
349       PetscCall(ISDestroy(&isll));
350       PetscCall(ISDestroy(&isl));
351       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
352         ISLocalToGlobalMapping ltog;
353         IS                     isll, isll_local;
354         const PetscInt        *idx_local;
355         PetscInt              *idx1, *idx2, nout;
356 
357         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
358         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
359 
        /* idx1: positions of is_local[i] within the overlapping is[i] */
360         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
361         PetscCall(PetscMalloc1(m_local, &idx1));
362         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
363         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
364         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
365         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
366 
        /* idx2: positions of is_local[i] within the union lis */
367         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
368         PetscCall(PetscMalloc1(m_local, &idx2));
369         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
370         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
371         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
372         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
373 
374         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
375         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
376 
377         PetscCall(ISDestroy(&isll));
378         PetscCall(ISDestroy(&isll_local));
379       }
380     }
381     PetscCall(VecDestroy(&vec));
382   }
383 
  /* Multiplicative composition also needs the rectangular coupling blocks
     (rows = is[i], columns = the whole union lis) to update later RHS blocks */
384   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
385     IS      *cis;
386     PetscInt c;
387 
388     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
389     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
390     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
391     PetscCall(PetscFree(cis));
392   }
393 
394   /* Return control to the user so that the submatrices can be modified (e.g., to apply
395      different boundary conditions for the submatrices than for the global problem) */
396   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
397 
398   /*
399      Loop over subdomains putting them into local ksp
400   */
401   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
402   for (i = 0; i < osm->n_local_true; i++) {
403     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
404     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
405     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
406   }
407   PetscFunctionReturn(PETSC_SUCCESS);
408 }
409 
410 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
411 {
412   PC_ASM            *osm = (PC_ASM *)pc->data;
413   PetscInt           i;
414   KSPConvergedReason reason;
415 
416   PetscFunctionBegin;
417   for (i = 0; i < osm->n_local_true; i++) {
418     PetscCall(KSPSetUp(osm->ksp[i]));
419     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
420     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
421   }
422   PetscFunctionReturn(PETSC_SUCCESS);
423 }
424 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = sum_i R_i^T A_i^{-1} R_i x
  (with restriction/interpolation limited to local values for the restrict /
  interpolate variants, and sequential RHS updates for multiplicative local
  composition).

  Sequence: scatter the global RHS x into the concatenated local vector lx,
  restrict lx into each block RHS x[i], solve each block with its KSP,
  accumulate the block solutions into ly (through lprolongation for the
  restrictive-additive case, otherwise through lrestriction reversed), and
  finally scatter-add ly back into the global solution y.
*/
425 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
426 {
427   PC_ASM     *osm = (PC_ASM *)pc->data;
428   PetscInt    i, n_local_true = osm->n_local_true;
429   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
430 
431   PetscFunctionBegin;
432   /*
433      support for limiting the restriction or interpolation to only local
434      subdomain values (leaving the other values 0).
435   */
436   if (!(osm->type & PC_ASM_RESTRICT)) {
437     forward = SCATTER_FORWARD_LOCAL;
438     /* have to zero the work RHS since scatter may leave some slots empty */
439     PetscCall(VecSet(osm->lx, 0.0));
440   }
441   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
442 
443   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
444   /* zero the global and the local solutions */
445   PetscCall(VecSet(y, 0.0));
446   PetscCall(VecSet(osm->ly, 0.0));
447 
448   /* copy the global RHS to local RHS including the ghost nodes */
449   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
450   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
451 
452   /* restrict local RHS to the overlapping 0-block RHS */
453   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
454   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
455 
456   /* do the local solves */
457   for (i = 0; i < n_local_true; ++i) {
458     /* solve the overlapping i-block */
459     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
460     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
461     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
462     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
463 
464     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
465       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
466       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
467     } else { /* interpolate the overlapping i-block solution to the local solution */
468       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
469       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
470     }
471 
    /* Prepare the next block's RHS; the next restriction must happen AFTER the
       current block's contribution has been added to ly */
472     if (i < n_local_true - 1) {
473       /* restrict local RHS to the overlapping (i+1)-block RHS */
474       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
475       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
476 
477       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
478         /* update the overlapping (i+1)-block RHS using the current local solution */
479         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
480         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
481       }
482     }
483   }
484   /* add the local solution to the global solution including the ghost nodes */
485   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
486   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
487   PetscFunctionReturn(PETSC_SUCCESS);
488 }
489 
/*
  PCMatApply_ASM - Applies ASM to all N columns of a dense matrix of right-hand
  sides at once: Y = M^{-1} X, column by column for the scatters but with a
  single KSPMatSolve() for the block solve.

  Only supported for at most one local subdomain (checked below), so only the
  0-block scatters and solver are used.  Z holds the restricted RHS columns,
  W the block solutions.
*/
490 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
491 {
492   PC_ASM     *osm = (PC_ASM *)pc->data;
493   Mat         Z, W;
494   Vec         x;
495   PetscInt    i, m, N;
496   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
497 
498   PetscFunctionBegin;
499   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
500   /*
501      support for limiting the restriction or interpolation to only local
502      subdomain values (leaving the other values 0).
503   */
504   if (!(osm->type & PC_ASM_RESTRICT)) {
505     forward = SCATTER_FORWARD_LOCAL;
506     /* have to zero the work RHS since scatter may leave some slots empty */
507     PetscCall(VecSet(osm->lx, 0.0));
508   }
509   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
510   PetscCall(VecGetLocalSize(osm->x[0], &m));
511   PetscCall(MatGetSize(X, NULL, &N));
512   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
513 
514   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
515   /* zero the global and the local solutions */
516   PetscCall(MatZeroEntries(Y));
517   PetscCall(VecSet(osm->ly, 0.0));
518 
  /* Phase 1: restrict every RHS column of X into the corresponding column of Z */
519   for (i = 0; i < N; ++i) {
520     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
521     /* copy the global RHS to local RHS including the ghost nodes */
522     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
523     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
524     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
525 
526     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
527     /* restrict local RHS to the overlapping 0-block RHS */
528     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
529     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
530     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
531   }
532   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
533   /* solve the overlapping 0-block */
  /* Phase 2: one multi-RHS solve for all columns at once */
534   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
535   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
536   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
537   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
538   PetscCall(MatDestroy(&Z));
539 
  /* Phase 3: prolong each solution column of W back into the matching column of Y */
540   for (i = 0; i < N; ++i) {
541     PetscCall(VecSet(osm->ly, 0.0));
542     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
543     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
544       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
545       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
546     } else { /* interpolate the overlapping 0-block solution to the local solution */
547       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
548       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
549     }
550     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
551 
552     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
553     /* add the local solution to the global solution including the ghost nodes */
554     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
555     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
556     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
557   }
558   PetscCall(MatDestroy(&W));
559   PetscFunctionReturn(PETSC_SUCCESS);
560 }
561 
/*
  PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
  preconditioner.  Mirrors PCApply_ASM() but uses KSPSolveTranspose() for the
  block solves and swaps the roles of the restrict/interpolate flags, since
  transposing M^{-1} = sum R_i^T A_i^{-1} R_i exchanges the restriction and
  interpolation operators.
*/
562 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
563 {
564   PC_ASM     *osm = (PC_ASM *)pc->data;
565   PetscInt    i, n_local_true = osm->n_local_true;
566   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
567 
568   PetscFunctionBegin;
569   /*
570      Support for limiting the restriction or interpolation to only local
571      subdomain values (leaving the other values 0).
572 
573      Note: these are reversed from the PCApply_ASM() because we are applying the
574      transpose of the three terms
575   */
576 
577   if (!(osm->type & PC_ASM_INTERPOLATE)) {
578     forward = SCATTER_FORWARD_LOCAL;
579     /* have to zero the work RHS since scatter may leave some slots empty */
580     PetscCall(VecSet(osm->lx, 0.0));
581   }
582   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
583 
584   /* zero the global and the local solutions */
585   PetscCall(VecSet(y, 0.0));
586   PetscCall(VecSet(osm->ly, 0.0));
587 
588   /* Copy the global RHS to local RHS including the ghost nodes */
589   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
590   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
591 
592   /* Restrict local RHS to the overlapping 0-block RHS */
593   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
594   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
595 
596   /* do the local solves */
597   for (i = 0; i < n_local_true; ++i) {
598     /* solve the overlapping i-block */
599     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
600     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
601     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
602     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
603 
    /* Transposed variant: lprolongation applies when the type is not RESTRICT
       (the opposite condition of PCApply_ASM) */
604     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
605       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
606       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
607     } else { /* interpolate the overlapping i-block solution to the local solution */
608       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
609       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
610     }
611 
612     if (i < n_local_true - 1) {
613       /* Restrict local RHS to the overlapping (i+1)-block RHS */
614       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
615       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
616     }
617   }
618   /* Add the local solution to the global solution including the ghost nodes */
619   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
620   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
621   PetscFunctionReturn(PETSC_SUCCESS);
622 }
623 
/* Frees all data created during PCSetUp_ASM() while keeping the PC usable for a later setup */
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* Reset (but do not destroy) the subdomain solvers so they can be reused by a later PCSetUp() */
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
  }
  /* Destroy the per-block submatrices extracted from the preconditioning matrix */
  if (osm->pmat) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
  }
  /* osm->lrestriction doubles as the flag that the scatters and block work vectors were created */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i = 0; i < osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      /* lprolongation is not created for every ASM variant, so guard the per-entry destroy */
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));
  }
  /* Release the subdomain index sets, the merged local index set, and the local work vectors */
  PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* osm->lmats exists only when the local composition is multiplicative */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
661 
/* Destroys the PC_ASM context entirely; called when the PC is destroyed or retyped */
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  /* PCReset_ASM() frees everything except the KSP array and the PC_ASM struct itself */
  PetscCall(PCReset_ASM(pc));
  if (osm->ksp) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
    PetscCall(PetscFree(osm->ksp));
  }
  PetscCall(PetscFree(pc->data));

  /* Remove the composed methods so a subsequent PCSetType() does not see stale entries */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
688 
689 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
690 {
691   PC_ASM         *osm = (PC_ASM *)pc->data;
692   PetscInt        blocks, ovl;
693   PetscBool       flg;
694   PCASMType       asmtype;
695   PCCompositeType loctype;
696   char            sub_mat_type[256];
697 
698   PetscFunctionBegin;
699   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
700   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
701   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
702   if (flg) {
703     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
704     osm->dm_subdomains = PETSC_FALSE;
705   }
706   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
707   if (flg) {
708     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
709     osm->dm_subdomains = PETSC_FALSE;
710   }
711   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
712   if (flg) {
713     PetscCall(PCASMSetOverlap(pc, ovl));
714     osm->dm_subdomains = PETSC_FALSE;
715   }
716   flg = PETSC_FALSE;
717   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
718   if (flg) PetscCall(PCASMSetType(pc, asmtype));
719   flg = PETSC_FALSE;
720   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
721   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
722   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
723   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
724   PetscOptionsHeadEnd();
725   PetscFunctionReturn(PETSC_SUCCESS);
726 }
727 
/* Implementation of PCASMSetLocalSubdomains(): records the caller-supplied subdomain index sets */
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Take references BEFORE destroying the old subdomains, in case the caller passed in
       the very IS objects currently stored in osm->is/osm->is_local */
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local was given: derive osm->is from it */
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            /* No overlap extension: osm->is[i] can safely share the IS with osm->is_local[i] */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
774 
775 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
776 {
777   PC_ASM     *osm = (PC_ASM *)pc->data;
778   PetscMPIInt rank, size;
779   PetscInt    n;
780 
781   PetscFunctionBegin;
782   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
783   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
784 
785   /*
786      Split the subdomains equally among all processors
787   */
788   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
789   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
790   n = N / size + ((N % size) > rank);
791   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
792   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
793   if (!pc->setupcalled) {
794     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
795 
796     osm->n_local_true = n;
797     osm->is           = NULL;
798     osm->is_local     = NULL;
799   }
800   PetscFunctionReturn(PETSC_SUCCESS);
801 }
802 
803 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
804 {
805   PC_ASM *osm = (PC_ASM *)pc->data;
806 
807   PetscFunctionBegin;
808   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
809   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
810   if (!pc->setupcalled) osm->overlap = ovl;
811   PetscFunctionReturn(PETSC_SUCCESS);
812 }
813 
814 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
815 {
816   PC_ASM *osm = (PC_ASM *)pc->data;
817 
818   PetscFunctionBegin;
819   osm->type     = type;
820   osm->type_set = PETSC_TRUE;
821   PetscFunctionReturn(PETSC_SUCCESS);
822 }
823 
824 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
825 {
826   PC_ASM *osm = (PC_ASM *)pc->data;
827 
828   PetscFunctionBegin;
829   *type = osm->type;
830   PetscFunctionReturn(PETSC_SUCCESS);
831 }
832 
833 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
834 {
835   PC_ASM *osm = (PC_ASM *)pc->data;
836 
837   PetscFunctionBegin;
838   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
839   osm->loctype = type;
840   PetscFunctionReturn(PETSC_SUCCESS);
841 }
842 
843 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
844 {
845   PC_ASM *osm = (PC_ASM *)pc->data;
846 
847   PetscFunctionBegin;
848   *type = osm->loctype;
849   PetscFunctionReturn(PETSC_SUCCESS);
850 }
851 
852 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
853 {
854   PC_ASM *osm = (PC_ASM *)pc->data;
855 
856   PetscFunctionBegin;
857   osm->sort_indices = doSort;
858   PetscFunctionReturn(PETSC_SUCCESS);
859 }
860 
861 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
862 {
863   PC_ASM *osm = (PC_ASM *)pc->data;
864 
865   PetscFunctionBegin;
866   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
867 
868   if (n_local) *n_local = osm->n_local_true;
869   if (first_local) {
870     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
871     *first_local -= osm->n_local_true;
872   }
873   if (ksp) *ksp = osm->ksp;
874   PetscFunctionReturn(PETSC_SUCCESS);
875 }
876 
877 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
878 {
879   PC_ASM *osm = (PC_ASM *)pc->data;
880 
881   PetscFunctionBegin;
882   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
883   PetscAssertPointer(sub_mat_type, 2);
884   *sub_mat_type = osm->sub_mat_type;
885   PetscFunctionReturn(PETSC_SUCCESS);
886 }
887 
888 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
889 {
890   PC_ASM *osm = (PC_ASM *)pc->data;
891 
892   PetscFunctionBegin;
893   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
894   PetscCall(PetscFree(osm->sub_mat_type));
895   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
896   PetscFunctionReturn(PETSC_SUCCESS);
897 }
898 
/*@C
  PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.

  Collective

  Input Parameters:
+ pc       - the preconditioner context
. n        - the number of subdomains for this processor (default value = 1)
. is       - the index set that defines the subdomains for this processor
         (or `NULL` for PETSc to determine subdomains)
- is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
         (or `NULL` to not provide these)

  Options Database Key:
. -pc_asm_local_blocks <blks> - Sets number of local blocks

  Level: advanced

  Notes:
  The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`

  By default the `PCASM` preconditioner uses 1 block per processor.

  Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.

  If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
  back to form the global solution (this is the standard restricted additive Schwarz method)

  If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
  no code to handle that case.

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
@*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
940 
/*@C
  PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
  additive Schwarz preconditioner, `PCASM`.

  Collective, all MPI ranks must pass in the same array of `IS`

  Input Parameters:
+ pc       - the preconditioner context
. N        - the number of subdomains for all processors
. is       - the index sets that define the subdomains for all processors
         (or `NULL` to ask PETSc to determine the subdomains)
- is_local - the index sets that define the local part of the subdomains for this processor
         (or `NULL` to not provide this information)

  Options Database Key:
. -pc_asm_blocks <blks> - Sets total blocks

  Level: advanced

  Notes:
  Currently you cannot use this to set the actual subdomains with the arguments `is` or `is_local`.

  By default the `PCASM` preconditioner uses 1 block per processor.

  These index sets cannot be destroyed until after completion of the
  linear solves for which the `PCASM` preconditioner is being used.

  Use `PCASMSetLocalSubdomains()` to set local subdomains.

  The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`

.seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCGASM`
@*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
982 
983 /*@
984   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
985   additive Schwarz preconditioner, `PCASM`.
986 
987   Logically Collective
988 
989   Input Parameters:
990 + pc  - the preconditioner context
991 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
992 
993   Options Database Key:
994 . -pc_asm_overlap <ovl> - Sets overlap
995 
996   Level: intermediate
997 
998   Notes:
999   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1000   multiple blocks per perocessor, see `PCASMSetTotalSubdomains()` and
1001   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1002 
1003   The overlap defaults to 1, so if one desires that no additional
1004   overlap be computed beyond what may have been set with a call to
1005   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1006   must be set to be 0.  In particular, if one does not explicitly set
1007   the subdomains an application code, then all overlap would be computed
1008   internally by PETSc, and using an overlap of 0 would result in an `PCASM`
1009   variant that is equivalent to the block Jacobi preconditioner.
1010 
1011   The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1012   use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1013 
1014   One can define initial index sets with any overlap via
1015   `PCASMSetLocalSubdomains()`; the routine
1016   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1017   if desired.
1018 
1019 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1020           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1021 @*/
1022 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1023 {
1024   PetscFunctionBegin;
1025   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1026   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1027   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1028   PetscFunctionReturn(PETSC_SUCCESS);
1029 }
1030 
1031 /*@
1032   PCASMSetType - Sets the type of restriction and interpolation used
1033   for local problems in the additive Schwarz method, `PCASM`.
1034 
1035   Logically Collective
1036 
1037   Input Parameters:
1038 + pc   - the preconditioner context
1039 - type - variant of `PCASM`, one of
1040 .vb
1041       PC_ASM_BASIC       - full interpolation and restriction
1042       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1043       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1044       PC_ASM_NONE        - local processor restriction and interpolation
1045 .ve
1046 
1047   Options Database Key:
1048 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1049 
1050   Level: intermediate
1051 
1052   Note:
1053   if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1054   to limit the local processor interpolation
1055 
1056 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1057           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1058 @*/
1059 PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1060 {
1061   PetscFunctionBegin;
1062   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1063   PetscValidLogicalCollectiveEnum(pc, type, 2);
1064   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1065   PetscFunctionReturn(PETSC_SUCCESS);
1066 }
1067 
1068 /*@
1069   PCASMGetType - Gets the type of restriction and interpolation used
1070   for local problems in the additive Schwarz method, `PCASM`.
1071 
1072   Logically Collective
1073 
1074   Input Parameter:
1075 . pc - the preconditioner context
1076 
1077   Output Parameter:
1078 . type - variant of `PCASM`, one of
1079 .vb
1080       PC_ASM_BASIC       - full interpolation and restriction
1081       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1082       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1083       PC_ASM_NONE        - local processor restriction and interpolation
1084 .ve
1085 
1086   Options Database Key:
1087 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1088 
1089   Level: intermediate
1090 
1091 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1092           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1093 @*/
1094 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1095 {
1096   PetscFunctionBegin;
1097   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1098   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1099   PetscFunctionReturn(PETSC_SUCCESS);
1100 }
1101 
1102 /*@
1103   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1104 
1105   Logically Collective
1106 
1107   Input Parameters:
1108 + pc   - the preconditioner context
1109 - type - type of composition, one of
1110 .vb
1111   PC_COMPOSITE_ADDITIVE       - local additive combination
1112   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1113 .ve
1114 
1115   Options Database Key:
1116 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1117 
1118   Level: intermediate
1119 
1120 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1121 @*/
1122 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1123 {
1124   PetscFunctionBegin;
1125   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1126   PetscValidLogicalCollectiveEnum(pc, type, 2);
1127   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1128   PetscFunctionReturn(PETSC_SUCCESS);
1129 }
1130 
1131 /*@
1132   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1133 
1134   Logically Collective
1135 
1136   Input Parameter:
1137 . pc - the preconditioner context
1138 
1139   Output Parameter:
1140 . type - type of composition, one of
1141 .vb
1142   PC_COMPOSITE_ADDITIVE       - local additive combination
1143   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1144 .ve
1145 
1146   Options Database Key:
1147 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1148 
1149   Level: intermediate
1150 
1151 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1152 @*/
1153 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1154 {
1155   PetscFunctionBegin;
1156   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1157   PetscAssertPointer(type, 2);
1158   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1159   PetscFunctionReturn(PETSC_SUCCESS);
1160 }
1161 
1162 /*@
1163   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1164 
1165   Logically Collective
1166 
1167   Input Parameters:
1168 + pc     - the preconditioner context
1169 - doSort - sort the subdomain indices
1170 
1171   Level: intermediate
1172 
1173 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1174           `PCASMCreateSubdomains2D()`
1175 @*/
1176 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1177 {
1178   PetscFunctionBegin;
1179   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1180   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1181   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1182   PetscFunctionReturn(PETSC_SUCCESS);
1183 }
1184 
/*@C
  PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
  this processor.

  Collective iff first_local is requested

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ n_local     - the number of blocks on this processor or `NULL`
. first_local - the global number of the first block on this processor or `NULL`,
                 all processors must request or all must pass `NULL`
- ksp         - the array of `KSP` contexts

  Level: advanced

  Notes:
  After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.

  You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.

  Fortran Notes:
  The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.

.seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1220 
1221 /*MC
1222    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1223            its own `KSP` object.
1224 
1225    Options Database Keys:
1226 +  -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank.
1227 .  -pc_asm_overlap <ovl> - Sets overlap
1228 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1229 -  -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1230 
1231    Level: beginner
1232 
1233    Notes:
   If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
   will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
   -pc_asm_type basic to get the same convergence behavior.
1237 
1238    Each processor can have one or more blocks, but a block cannot be shared by more
1239    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1240 
1241    To set options on the solvers for each block append -sub_ to all the `KSP`, and `PC`
1242    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1243 
1244    To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
1245    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1246 
1247     References:
1248 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1249      Courant Institute, New York University Technical report
1250 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1251     Cambridge University Press.
1252 
1253 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1254           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1255           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1256 M*/
1257 
/* Creates the PC_ASM context, sets the default parameters, and installs the PCASM method table */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscCall(PetscNew(&osm));

  /* Default parameters; subdomain counts are resolved at setup time */
  osm->n             = PETSC_DECIDE; /* total number of subdomain blocks */
  osm->n_local       = 0;
  osm->n_local_true  = PETSC_DECIDE; /* number of blocks on this process */
  osm->overlap       = 1;            /* extend subdomains by one layer by default */
  osm->ksp           = NULL;
  osm->restriction   = NULL;
  osm->lprolongation = NULL;
  osm->lrestriction  = NULL;
  osm->x             = NULL;
  osm->y             = NULL;
  osm->is            = NULL;
  osm->is_local      = NULL;
  osm->mat           = NULL;
  osm->pmat          = NULL;
  osm->type          = PC_ASM_RESTRICT;        /* restricted additive Schwarz is the default */
  osm->loctype       = PC_COMPOSITE_ADDITIVE;  /* local solves combined additively by default */
  osm->sort_indices  = PETSC_TRUE;
  osm->dm_subdomains = PETSC_FALSE;
  osm->sub_mat_type  = NULL;

  pc->data                 = (void *)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* Compose the implementation-specific methods invoked by the public PCASMXXX() interfaces */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1310 
1311 /*@C
1312   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1313   preconditioner, `PCASM`,  for any problem on a general grid.
1314 
1315   Collective
1316 
1317   Input Parameters:
1318 + A - The global matrix operator
1319 - n - the number of local blocks
1320 
1321   Output Parameter:
1322 . outis - the array of index sets defining the subdomains
1323 
1324   Level: advanced
1325 
1326   Note:
1327   This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
1328   from these if you use `PCASMSetLocalSubdomains()`
1329 
1330   Fortran Notes:
1331   You must provide the array outis[] already allocated of length n.
1332 
1333 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1334 @*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE; /* foundpart: a graph partitioning was actually computed */
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscAssertPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  /* the local row range must contain a whole number of blocks, otherwise the block-aware splitting below is ill-defined */
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    /* detect (S)BAIJ so that MatGetRowIJ can compress rows to the block graph */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* CURRENT and SQUARE are trivial partitioners; only a "real" one (e.g. ParMETIS) is worth the adjacency build below */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE: MatCreateMPIAdj takes ownership of iia/jja; they are freed when adj is destroyed, so no PetscFree here */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* the caller owns the returned array and the index sets; see PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs;
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      /* distribute mbs blocks over n chunks; the first (mbs % n) chunks get one extra block */
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    /* sorting the identity permutation by the new numbering groups the old indices of each partition contiguously */
    PetscCall(ISGetIndices(isnumb, &numbering));
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1481 
1482 /*@C
1483   PCASMDestroySubdomains - Destroys the index sets created with
1484   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1485 
1486   Collective
1487 
1488   Input Parameters:
1489 + n        - the number of index sets
1490 . is       - the array of index sets
1491 - is_local - the array of local index sets, can be `NULL`
1492 
1493   Level: advanced
1494 
1495 .seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1496 @*/
1497 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1498 {
1499   PetscInt i;
1500 
1501   PetscFunctionBegin;
1502   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1503   if (is) {
1504     PetscAssertPointer(is, 2);
1505     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1506     PetscCall(PetscFree(is));
1507   }
1508   if (is_local) {
1509     PetscAssertPointer(is_local, 3);
1510     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1511     PetscCall(PetscFree(is_local));
1512   }
1513   PetscFunctionReturn(PETSC_SUCCESS);
1514 }
1515 
1516 /*@C
1517   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1518   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1519 
1520   Not Collective
1521 
1522   Input Parameters:
1523 + m       - the number of mesh points in the x direction
1524 . n       - the number of mesh points in the y direction
1525 . M       - the number of subdomains in the x direction
1526 . N       - the number of subdomains in the y direction
1527 . dof     - degrees of freedom per node
1528 - overlap - overlap in mesh lines
1529 
1530   Output Parameters:
1531 + Nsub     - the number of subdomains created
1532 . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1533 - is_local - array of index sets defining non-overlapping subdomains
1534 
1535   Level: advanced
1536 
1537   Note:
  Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1539   preconditioners.  More general related routines are
1540   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1541 
1542   Fortran Notes:
1543   The `IS` must be declared as an array of length long enough to hold `Nsub` entries
1544 
1545 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1546           `PCASMSetOverlap()`
1547 @*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  /* one subdomain per (i,j) cell of the N-by-M subdomain grid */
  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0; /* flat index of the current subdomain, row-major over the subdomain grid */
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    /* extend the strip by `overlap` mesh lines in y, clipped to the mesh boundary */
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      /* extend by `overlap` in x, clipped to the mesh boundary */
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      /* enumerate the overlapped rectangle in the global row-major node numbering */
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        /* no overlap: the local subdomain equals the overlapped one; share the IS via an extra reference */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx (nidx is large enough, since the non-overlapped box is a subset) for the inner rectangle */
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* the overlapped sets were built boundary-clipped and may be out of order; sort for downstream consumers */
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1603 
1604 /*@C
1605   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1606   only) for the additive Schwarz preconditioner, `PCASM`.
1607 
1608   Not Collective
1609 
1610   Input Parameter:
1611 . pc - the preconditioner context
1612 
1613   Output Parameters:
1614 + n        - if requested, the number of subdomains for this processor (default value = 1)
1615 . is       - if requested, the index sets that define the subdomains for this processor
1616 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1617 
1618   Level: advanced
1619 
1620   Note:
1621   The `IS` numbering is in the parallel, global numbering of the vector.
1622 
1623 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1624           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1625 @*/
1626 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1627 {
1628   PC_ASM   *osm = (PC_ASM *)pc->data;
1629   PetscBool match;
1630 
1631   PetscFunctionBegin;
1632   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1633   if (n) PetscAssertPointer(n, 2);
1634   if (is) PetscAssertPointer(is, 3);
1635   if (is_local) PetscAssertPointer(is_local, 4);
1636   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1637   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1638   if (n) *n = osm->n_local_true;
1639   if (is) *is = osm->is;
1640   if (is_local) *is_local = osm->is_local;
1641   PetscFunctionReturn(PETSC_SUCCESS);
1642 }
1643 
1644 /*@C
1645   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1646   only) for the additive Schwarz preconditioner, `PCASM`.
1647 
1648   Not Collective
1649 
1650   Input Parameter:
1651 . pc - the preconditioner context
1652 
1653   Output Parameters:
1654 + n   - if requested, the number of matrices for this processor (default value = 1)
1655 - mat - if requested, the matrices
1656 
1657   Level: advanced
1658 
1659   Notes:
  Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`
1661 
1662   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1663 
1664 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1665           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1666 @*/
1667 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1668 {
1669   PC_ASM   *osm;
1670   PetscBool match;
1671 
1672   PetscFunctionBegin;
1673   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1674   if (n) PetscAssertPointer(n, 2);
1675   if (mat) PetscAssertPointer(mat, 3);
1676   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1677   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1678   if (!match) {
1679     if (n) *n = 0;
1680     if (mat) *mat = NULL;
1681   } else {
1682     osm = (PC_ASM *)pc->data;
1683     if (n) *n = osm->n_local_true;
1684     if (mat) *mat = osm->pmat;
1685   }
1686   PetscFunctionReturn(PETSC_SUCCESS);
1687 }
1688 
1689 /*@
1690   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1691 
1692   Logically Collective
1693 
1694   Input Parameters:
1695 + pc  - the preconditioner
1696 - flg - boolean indicating whether to use subdomains defined by the `DM`
1697 
1698   Options Database Key:
1699 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1700 
1701   Level: intermediate
1702 
1703   Note:
1704   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1705   so setting either of the first two effectively turns the latter off.
1706 
1707 .seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1708           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1709 @*/
1710 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1711 {
1712   PC_ASM   *osm = (PC_ASM *)pc->data;
1713   PetscBool match;
1714 
1715   PetscFunctionBegin;
1716   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1717   PetscValidLogicalCollectiveBool(pc, flg, 2);
1718   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1719   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1720   if (match) osm->dm_subdomains = flg;
1721   PetscFunctionReturn(PETSC_SUCCESS);
1722 }
1723 
1724 /*@
1725   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1726 
1727   Not Collective
1728 
1729   Input Parameter:
1730 . pc - the preconditioner
1731 
1732   Output Parameter:
1733 . flg - boolean indicating whether to use subdomains defined by the `DM`
1734 
1735   Level: intermediate
1736 
1737 .seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1738           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1739 @*/
1740 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1741 {
1742   PC_ASM   *osm = (PC_ASM *)pc->data;
1743   PetscBool match;
1744 
1745   PetscFunctionBegin;
1746   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1747   PetscAssertPointer(flg, 2);
1748   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1749   if (match) *flg = osm->dm_subdomains;
1750   else *flg = PETSC_FALSE;
1751   PetscFunctionReturn(PETSC_SUCCESS);
1752 }
1753 
1754 /*@C
1755   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1756 
1757   Not Collective
1758 
1759   Input Parameter:
1760 . pc - the `PC`
1761 
1762   Output Parameter:
1763 . sub_mat_type - name of matrix type
1764 
1765   Level: advanced
1766 
1767 .seealso: `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1768 @*/
1769 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1770 {
1771   PetscFunctionBegin;
1772   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1773   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1774   PetscFunctionReturn(PETSC_SUCCESS);
1775 }
1776 
1777 /*@C
1778   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1779 
1780   Collective
1781 
1782   Input Parameters:
1783 + pc           - the `PC` object
1784 - sub_mat_type - the `MatType`
1785 
1786   Options Database Key:
1787 . -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1788    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1789 
1790   Note:
1791   See `MatType` for available types
1792 
1793   Level: advanced
1794 
1795 .seealso: `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1796 @*/
PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the type-specific implementation; no-op if the PC is not a PCASM */
  PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1804