xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision a96705020bd7e362116d3e2c8daa1efc28dedc1e)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
/*
  PCView_ASM - Viewer routine for PCASM.

  Prints the block count, overlap amount, restriction/interpolation type and local
  composition type. For ASCII viewers: in default format only rank 0's first block
  solver is shown; in ASCII_INFO_DETAIL format every rank prints each of its local
  block solvers through a synchronized subviewer. For string viewers a one-line
  summary (plus the first block's KSP, if created) is emitted.
*/
static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
{
  PC_ASM           *osm = (PC_ASM *)pc->data;
  PetscMPIInt       rank;
  PetscInt          i, bsz;
  PetscBool         iascii, isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char       *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    /* fallback texts used until overlap / total block count have been determined */
    char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
        /* GetSubViewer/RestoreSubViewer are collective on the outer viewer: every rank
           must make both calls even though only rank 0 views its first block solver */
        PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(viewer));
          PetscCall(KSPView(osm->ksp[0], sviewer));
          PetscCall(PetscViewerASCIIPopTab(viewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      }
    } else {
      /* detailed view: each rank prints its block count and every local block solver */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i], &bsz));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
        PetscCall(KSPView(osm->ksp[i], sviewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
79 
80 static PetscErrorCode PCASMPrintSubdomains(PC pc)
81 {
82   PC_ASM         *osm = (PC_ASM *)pc->data;
83   const char     *prefix;
84   char            fname[PETSC_MAX_PATH_LEN + 1];
85   PetscViewer     viewer, sviewer;
86   char           *s;
87   PetscInt        i, j, nidx;
88   const PetscInt *idx;
89   PetscMPIInt     rank, size;
90 
91   PetscFunctionBegin;
92   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
93   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
94   PetscCall(PCGetOptionsPrefix(pc, &prefix));
95   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
96   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
97   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
98   for (i = 0; i < osm->n_local; i++) {
99     if (i < osm->n_local_true) {
100       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
101       PetscCall(ISGetIndices(osm->is[i], &idx));
102       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
103 #define len 16 * (nidx + 1) + 512
104       PetscCall(PetscMalloc1(len, &s));
105       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
106 #undef len
107       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
108       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
109       PetscCall(ISRestoreIndices(osm->is[i], &idx));
110       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
111       PetscCall(PetscViewerDestroy(&sviewer));
112       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
113       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
114       PetscCall(PetscViewerFlush(viewer));
115       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
116       PetscCall(PetscFree(s));
117       if (osm->is_local) {
118         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
119 #define len 16 * (nidx + 1) + 512
120         PetscCall(PetscMalloc1(len, &s));
121         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
122 #undef len
123         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
124         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
125         PetscCall(ISGetIndices(osm->is_local[i], &idx));
126         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
127         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
128         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
129         PetscCall(PetscViewerDestroy(&sviewer));
130         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
131         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
132         PetscCall(PetscViewerFlush(viewer));
133         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
134         PetscCall(PetscFree(s));
135       }
136     } else {
137       /* Participate in collective viewer calls. */
138       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
139       PetscCall(PetscViewerFlush(viewer));
140       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
141       /* Assume either all ranks have is_local or none do. */
142       if (osm->is_local) {
143         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
144         PetscCall(PetscViewerFlush(viewer));
145         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
146       }
147     }
148   }
149   PetscCall(PetscViewerFlush(viewer));
150   PetscCall(PetscViewerDestroy(&viewer));
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
/*
  PCSetUp_ASM - Builds all data structures for the additive Schwarz preconditioner:
  the subdomain index sets (possibly from the DM), the global/local and local/block
  scatters, the overlapping submatrices, and one sequential KSP per local subdomain.

  On the first call (pc->setupcalled false) everything is created; on subsequent
  calls only the submatrices are re-extracted, reusing them (MAT_REUSE_MATRIX)
  unless the nonzero pattern changed or a submatrix type conversion forces a
  fresh extraction.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscBool   flg;
  PetscInt    i, m, m_local;
  MatReuse    scall = MAT_REUSE_MATRIX;
  IS          isl;
  KSP         ksp;
  PC          subpc;
  const char *prefix, *pprefix;
  Vec         vec;
  DM         *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m; /* NOTE(review): shadows the outer m declared above (-Wshadow); appears harmless since the outer m is only used in later blocks */

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt num_domains, d;
        char   **domain_names;
        IS      *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        /* PCASMSetLocalSubdomains() took references; release the DM's copies */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names) PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {
        PetscInt max, sum;
      } inwork, outwork;
      PetscMPIInt size;

      inwork.max = osm->n_local_true;
      inwork.sum = osm->n_local_true;
      PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
      for (i = 0; i < osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
        } else {
          /* no overlap: the local IS is the same object as the overlapping one */
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc, &prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
    }
    if (osm->sort_indices) {
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
      if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
      for (i = 0; i < osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
        PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
        PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
        PetscCall(KSPSetType(ksp, KSPPREONLY));
        PetscCall(KSPGetPC(ksp, &subpc));
        /* sub-solvers pick up options under the "<pc prefix>sub_" prefix */
        PetscCall(PCGetOptionsPrefix(pc, &prefix));
        PetscCall(KSPSetOptionsPrefix(ksp, prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
        if (domain_dm) {
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) PetscCall(PetscFree(domain_dm));
    }

    /* lis = sorted, duplicate-free union of all local overlapping subdomains */
    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
    if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
    for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));

    PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
    /* lprolongation is only needed when a restricted variant must scatter back only non-overlapping values */
    if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));

    /* lx/ly are the local work vectors over the union lis; restriction scatters global -> lx */
    PetscCall(ISGetLocalSize(osm->lis, &m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
    PetscCall(MatGetVecType(osm->pmat[0], &vtype));
    PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
    PetscCall(VecSetSizes(osm->lx, m, m));
    PetscCall(VecSetType(osm->lx, vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i = 0; i < osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt        *idx_is;
      PetscInt              *idx_lis, nout;

      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
      PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
      PetscCall(ISGetLocalSize(osm->is[i], &m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m, &idx_lis));
      /* translate the global indices of is[i] into positions within lis */
      PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
      PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
      PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll, isll_local;
        const PetscInt        *idx_local;
        PetscInt              *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        /* idx1: positions of is_local[i] within the overlapping subdomain is[i] */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
        PetscCall(PetscMalloc1(m_local, &idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));

        /* idx2: positions of is_local[i] within the union lis */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
        PetscCall(PetscMalloc1(m_local, &idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] = rows is[i], columns lis: used to update later blocks' RHS in PCApply_ASM */
    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
  for (i = 0; i < osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
    if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
409 
410 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
411 {
412   PC_ASM            *osm = (PC_ASM *)pc->data;
413   PetscInt           i;
414   KSPConvergedReason reason;
415 
416   PetscFunctionBegin;
417   for (i = 0; i < osm->n_local_true; i++) {
418     PetscCall(KSPSetUp(osm->ksp[i]));
419     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
420     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
421   }
422   PetscFunctionReturn(PETSC_SUCCESS);
423 }
424 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = sum_i R_i^T A_i^{-1} R_i x
  (with the restrict/interpolate variants realized by switching the scatters to their
  LOCAL modes, and PC_COMPOSITE_MULTIPLICATIVE updating each block's RHS with the
  solution of the previous blocks).

  x - global right-hand side (read only)
  y - global result (overwritten)
*/
static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
        /* update the overlapping (i+1)-block RHS using the current local solution:
           x[i+1] <- x[i+1] - lmats[i+1] * ly (y[i+1] serves as scratch) */
        PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
        PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
      }
    }
  }
  /* add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
489 
/*
  PCMatApply_ASM - Applies the ASM preconditioner to all columns of a dense matrix at
  once via KSPMatSolve(). Only a single local subdomain per rank is supported
  (enforced by the PetscCheck below); the column-wise scatters reuse the single-vector
  machinery through MatDenseGetColumnVec{Read,Write}().

  X - dense matrix of right-hand sides (read only)
  Y - dense matrix of results (overwritten)
*/
static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  Mat         Z, W;
  Vec         x;
  PetscInt    i, m, N;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
  /* Z holds the restricted RHS columns, sized by the 0-block: m rows, N columns */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));

  PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  /* zero the global and the local solutions */
  PetscCall(MatZeroEntries(Y));
  PetscCall(VecSet(osm->ly, 0.0));

  /* restrict every RHS column into Z */
  for (i = 0; i < N; ++i) {
    PetscCall(MatDenseGetColumnVecRead(X, i, &x));
    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
    PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
  }
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
  /* solve the overlapping 0-block for all columns at once */
  PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
  PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
  PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
  PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
  PetscCall(MatDestroy(&Z));

  /* scatter every solution column of W back into the corresponding column of Y */
  for (i = 0; i < N; ++i) {
    PetscCall(VecSet(osm->ly, 0.0));
    PetscCall(MatDenseGetColumnVecRead(W, i, &x));
    if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
      PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping 0-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
    }
    PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

    PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
    PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
  }
  PetscCall(MatDestroy(&W));
  PetscFunctionReturn(PETSC_SUCCESS);
}
561 
/*
  PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner:
  the same scatter/solve/scatter pipeline as PCApply_ASM() but with
  KSPSolveTranspose() for the block solves and the RESTRICT/INTERPOLATE
  roles of the scatter modes swapped (the transpose reverses the three terms).

  x - global right-hand side (read only)
  y - global result (overwritten)
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
{
  PC_ASM     *osm = (PC_ASM *)pc->data;
  PetscInt    i, n_local_true = osm->n_local_true;
  ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {
    /* solve the overlapping i-block (transposed) */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

    if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true - 1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
623 
624 static PetscErrorCode PCReset_ASM(PC pc)
625 {
626   PC_ASM  *osm = (PC_ASM *)pc->data;
627   PetscInt i;
628 
629   PetscFunctionBegin;
630   if (osm->ksp) {
631     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
632   }
633   if (osm->pmat) {
634     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
635   }
636   if (osm->lrestriction) {
637     PetscCall(VecScatterDestroy(&osm->restriction));
638     for (i = 0; i < osm->n_local_true; i++) {
639       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
640       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
641       PetscCall(VecDestroy(&osm->x[i]));
642       PetscCall(VecDestroy(&osm->y[i]));
643     }
644     PetscCall(PetscFree(osm->lrestriction));
645     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
646     PetscCall(PetscFree(osm->x));
647     PetscCall(PetscFree(osm->y));
648   }
649   PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
650   PetscCall(ISDestroy(&osm->lis));
651   PetscCall(VecDestroy(&osm->lx));
652   PetscCall(VecDestroy(&osm->ly));
653   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
654 
655   PetscCall(PetscFree(osm->sub_mat_type));
656 
657   osm->is       = NULL;
658   osm->is_local = NULL;
659   PetscFunctionReturn(PETSC_SUCCESS);
660 }
661 
662 static PetscErrorCode PCDestroy_ASM(PC pc)
663 {
664   PC_ASM  *osm = (PC_ASM *)pc->data;
665   PetscInt i;
666 
667   PetscFunctionBegin;
668   PetscCall(PCReset_ASM(pc));
669   if (osm->ksp) {
670     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
671     PetscCall(PetscFree(osm->ksp));
672   }
673   PetscCall(PetscFree(pc->data));
674 
675   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
676   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
677   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
686   PetscFunctionReturn(PETSC_SUCCESS);
687 }
688 
689 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
690 {
691   PC_ASM         *osm = (PC_ASM *)pc->data;
692   PetscInt        blocks, ovl;
693   PetscBool       flg;
694   PCASMType       asmtype;
695   PCCompositeType loctype;
696   char            sub_mat_type[256];
697 
698   PetscFunctionBegin;
699   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
700   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
701   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
702   if (flg) {
703     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
704     osm->dm_subdomains = PETSC_FALSE;
705   }
706   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
707   if (flg) {
708     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
709     osm->dm_subdomains = PETSC_FALSE;
710   }
711   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
712   if (flg) {
713     PetscCall(PCASMSetOverlap(pc, ovl));
714     osm->dm_subdomains = PETSC_FALSE;
715   }
716   flg = PETSC_FALSE;
717   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
718   if (flg) PetscCall(PCASMSetType(pc, asmtype));
719   flg = PETSC_FALSE;
720   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
721   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
722   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
723   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
724   PetscOptionsHeadEnd();
725   PetscFunctionReturn(PETSC_SUCCESS);
726 }
727 
/* Record the caller-supplied (overlapping) subdomains `is` and optional
   non-overlapping parts `is_local` for this process; must run before PCSetUp().
   Takes new references before releasing the old arrays so that re-passing the
   already-stored index sets is safe. */
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PC_ASM  *osm = (PC_ASM *)pc->data;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
  /* After setup, only a no-op call (same count, no new IS) is tolerated */
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets FIRST, so destroying the currently
       stored ones below cannot free objects the caller passed in again */
    if (is) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

    /* A different block count invalidates the existing sub-KSP array */
    if (osm->ksp && osm->n_local_true != n) {
      for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
      PetscCall(PetscFree(osm->ksp));
    }

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n, &osm->is));
      for (i = 0; i < n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n, &osm->is_local));
      for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local given: derive osm->is from it */
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
        for (i = 0; i < osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            /* deep copy so later overlap extension does not touch is_local[i] */
            PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
          } else {
            /* no overlap extension: share the same IS, just add a reference */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
779 
780 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
781 {
782   PC_ASM     *osm = (PC_ASM *)pc->data;
783   PetscMPIInt rank, size;
784   PetscInt    n;
785 
786   PetscFunctionBegin;
787   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
788   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
789 
790   /*
791      Split the subdomains equally among all processors
792   */
793   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
794   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
795   n = N / size + ((N % size) > rank);
796   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
797   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
798   if (!pc->setupcalled) {
799     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
800 
801     osm->n_local_true = n;
802     osm->is           = NULL;
803     osm->is_local     = NULL;
804   }
805   PetscFunctionReturn(PETSC_SUCCESS);
806 }
807 
/* Store the requested overlap width; rejected after PCSetUp() unless unchanged. */
static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
  PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(PETSC_SUCCESS);
}
818 
819 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
820 {
821   PC_ASM *osm = (PC_ASM *)pc->data;
822 
823   PetscFunctionBegin;
824   osm->type     = type;
825   osm->type_set = PETSC_TRUE;
826   PetscFunctionReturn(PETSC_SUCCESS);
827 }
828 
/* Return the currently selected restriction/interpolation variant. */
static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
837 
/* Set how the local block solves are composed; only additive and
   multiplicative composition are implemented. */
static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
847 
/* Return the local solve composition type (additive or multiplicative). */
static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
856 
/* Record whether subdomain index sets should be sorted (consumed in PCSetUp_ASM). */
static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(PETSC_SUCCESS);
}
865 
866 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
867 {
868   PC_ASM *osm = (PC_ASM *)pc->data;
869 
870   PetscFunctionBegin;
871   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
872 
873   if (n_local) *n_local = osm->n_local_true;
874   if (first_local) {
875     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
876     *first_local -= osm->n_local_true;
877   }
878   if (ksp) *ksp = osm->ksp;
879   PetscFunctionReturn(PETSC_SUCCESS);
880 }
881 
/* Return the matrix type requested for the subdomain matrices (may be NULL if unset). */
static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(sub_mat_type, 2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
892 
/* Store a private copy of the requested subdomain matrix type string,
   freeing any previously stored value to avoid a leak. */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
{
  PC_ASM *osm = (PC_ASM *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscCall(PetscFree(osm->sub_mat_type));
  PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
903 
904 /*@C
905   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
906 
907   Collective
908 
909   Input Parameters:
910 + pc       - the preconditioner context
911 . n        - the number of subdomains for this processor (default value = 1)
912 . is       - the index set that defines the subdomains for this processor
913          (or `NULL` for PETSc to determine subdomains)
914 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
915          (or `NULL` to not provide these)
916 
917   Options Database Key:
918 . -pc_asm_local_blocks <blks> - Sets number of local blocks
919 
920   Level: advanced
921 
922   Notes:
923   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
924 
925   By default the `PCASM` preconditioner uses 1 block per processor.
926 
927   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
928 
929   If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
930   back to form the global solution (this is the standard restricted additive Schwarz method)
931 
932   If the is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
933   no code to handle that case.
934 
935 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
936           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
937 @*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod(): silently a no-op if the PC type has not composed this method */
  PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
945 
946 /*@C
947   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
948   additive Schwarz preconditioner, `PCASM`.
949 
950   Collective, all MPI ranks must pass in the same array of `IS`
951 
952   Input Parameters:
953 + pc       - the preconditioner context
954 . N        - the number of subdomains for all processors
955 . is       - the index sets that define the subdomains for all processors
956          (or `NULL` to ask PETSc to determine the subdomains)
957 - is_local - the index sets that define the local part of the subdomains for this processor
958          (or `NULL` to not provide this information)
959 
960   Options Database Key:
961 . -pc_asm_blocks <blks> - Sets total blocks
962 
963   Level: advanced
964 
965   Notes:
966   Currently you cannot use this to set the actual subdomains with the argument is or is_local.
967 
968   By default the `PCASM` preconditioner uses 1 block per processor.
969 
970   These index sets cannot be destroyed until after completion of the
971   linear solves for which the `PCASM` preconditioner is being used.
972 
973   Use `PCASMSetLocalSubdomains()` to set local subdomains.
974 
975   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
976 
977 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
978           `PCASMCreateSubdomains2D()`, `PCGASM`
979 @*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscTryMethod(): silently a no-op if the PC type has not composed this method */
  PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
987 
988 /*@
989   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
990   additive Schwarz preconditioner, `PCASM`.
991 
992   Logically Collective
993 
994   Input Parameters:
995 + pc  - the preconditioner context
996 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
997 
998   Options Database Key:
999 . -pc_asm_overlap <ovl> - Sets overlap
1000 
1001   Level: intermediate
1002 
1003   Notes:
1004   By default the `PCASM` preconditioner uses 1 block per processor.  To use
  multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1006   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1007 
1008   The overlap defaults to 1, so if one desires that no additional
1009   overlap be computed beyond what may have been set with a call to
1010   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1011   must be set to be 0.  In particular, if one does not explicitly set
  the subdomains in an application code, then all overlap would be computed
1013   internally by PETSc, and using an overlap of 0 would result in an `PCASM`
1014   variant that is equivalent to the block Jacobi preconditioner.
1015 
1016   The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1017   use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1018 
1019   One can define initial index sets with any overlap via
1020   `PCASMSetLocalSubdomains()`; the routine
1021   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1022   if desired.
1023 
1024 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1025           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1026 @*/
PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* overlap must be collective: all ranks must pass the same value */
  PetscValidLogicalCollectiveInt(pc, ovl, 2);
  PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1035 
1036 /*@
1037   PCASMSetType - Sets the type of restriction and interpolation used
1038   for local problems in the additive Schwarz method, `PCASM`.
1039 
1040   Logically Collective
1041 
1042   Input Parameters:
1043 + pc   - the preconditioner context
1044 - type - variant of `PCASM`, one of
1045 .vb
1046       PC_ASM_BASIC       - full interpolation and restriction
1047       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1048       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1049       PC_ASM_NONE        - local processor restriction and interpolation
1050 .ve
1051 
1052   Options Database Key:
1053 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1054 
1055   Level: intermediate
1056 
1057   Note:
1058   if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1059   to limit the local processor interpolation
1060 
1061 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1062           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1063 @*/
PetscErrorCode PCASMSetType(PC pc, PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* type is logically collective: all ranks must pass the same value */
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1072 
1073 /*@
1074   PCASMGetType - Gets the type of restriction and interpolation used
1075   for local problems in the additive Schwarz method, `PCASM`.
1076 
1077   Logically Collective
1078 
1079   Input Parameter:
1080 . pc - the preconditioner context
1081 
1082   Output Parameter:
1083 . type - variant of `PCASM`, one of
1084 .vb
1085       PC_ASM_BASIC       - full interpolation and restriction
1086       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1087       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1088       PC_ASM_NONE        - local processor restriction and interpolation
1089 .ve
1090 
1091   Options Database Key:
1092 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1093 
1094   Level: intermediate
1095 
1096 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1097           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1098 @*/
1099 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1100 {
1101   PetscFunctionBegin;
1102   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1103   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1104   PetscFunctionReturn(PETSC_SUCCESS);
1105 }
1106 
1107 /*@
1108   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1109 
1110   Logically Collective
1111 
1112   Input Parameters:
1113 + pc   - the preconditioner context
1114 - type - type of composition, one of
1115 .vb
1116   PC_COMPOSITE_ADDITIVE       - local additive combination
1117   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1118 .ve
1119 
1120   Options Database Key:
1121 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1122 
1123   Level: intermediate
1124 
1125 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1126 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* type is logically collective: all ranks must pass the same value */
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1135 
1136 /*@
1137   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1138 
1139   Logically Collective
1140 
1141   Input Parameter:
1142 . pc - the preconditioner context
1143 
1144   Output Parameter:
1145 . type - type of composition, one of
1146 .vb
1147   PC_COMPOSITE_ADDITIVE       - local additive combination
1148   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1149 .ve
1150 
1151   Options Database Key:
1152 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1153 
1154   Level: intermediate
1155 
1156 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1157 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* PetscUseMethod(): errors if the PC type has not composed this method */
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1166 
1167 /*@
1168   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1169 
1170   Logically Collective
1171 
1172   Input Parameters:
1173 + pc     - the preconditioner context
1174 - doSort - sort the subdomain indices
1175 
1176   Level: intermediate
1177 
1178 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1179           `PCASMCreateSubdomains2D()`
1180 @*/
PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* doSort is logically collective: all ranks must pass the same value */
  PetscValidLogicalCollectiveBool(pc, doSort, 2);
  PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1189 
1190 /*@C
1191   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1192   this processor.
1193 
1194   Collective iff first_local is requested
1195 
1196   Input Parameter:
1197 . pc - the preconditioner context
1198 
1199   Output Parameters:
1200 + n_local     - the number of blocks on this processor or NULL
1201 . first_local - the global number of the first block on this processor or NULL,
1202                  all processors must request or all must pass NULL
1203 - ksp         - the array of `KSP` contexts
1204 
1205   Level: advanced
1206 
1207   Notes:
1208   After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.
1209 
1210   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1211 
1212   Fortran Notes:
1213   The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1214 
1215 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1216           `PCASMCreateSubdomains2D()`,
1217 @*/
PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* PetscUseMethod(): errors if the PC type has not composed this method;
     collective if first_local is requested (implementation uses MPI_Scan) */
  PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1225 
1226 /*MC
1227    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1228            its own `KSP` object.
1229 
1230    Options Database Keys:
1231 +  -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank.
1232 .  -pc_asm_overlap <ovl> - Sets overlap
1233 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1234 -  -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1235 
1236    Level: beginner
1237 
1238    Notes:
1239    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1240    will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1241     -pc_asm_type basic to get the same convergence behavior
1242 
1243    Each processor can have one or more blocks, but a block cannot be shared by more
1244    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1245 
1246    To set options on the solvers for each block append -sub_ to all the `KSP`, and `PC`
1247    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1248 
1249    To set the options on the solvers separate for each block call `PCASMGetSubKSP()`
1250    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
1251 
1252     References:
1253 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1254      Courant Institute, New York University Technical report
1255 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1256     Cambridge University Press.
1257 
1258 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1259           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1260           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1261 M*/
1262 
1263 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1264 {
1265   PC_ASM *osm;
1266 
1267   PetscFunctionBegin;
1268   PetscCall(PetscNew(&osm));
1269 
1270   osm->n             = PETSC_DECIDE;
1271   osm->n_local       = 0;
1272   osm->n_local_true  = PETSC_DECIDE;
1273   osm->overlap       = 1;
1274   osm->ksp           = NULL;
1275   osm->restriction   = NULL;
1276   osm->lprolongation = NULL;
1277   osm->lrestriction  = NULL;
1278   osm->x             = NULL;
1279   osm->y             = NULL;
1280   osm->is            = NULL;
1281   osm->is_local      = NULL;
1282   osm->mat           = NULL;
1283   osm->pmat          = NULL;
1284   osm->type          = PC_ASM_RESTRICT;
1285   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1286   osm->sort_indices  = PETSC_TRUE;
1287   osm->dm_subdomains = PETSC_FALSE;
1288   osm->sub_mat_type  = NULL;
1289 
1290   pc->data                 = (void *)osm;
1291   pc->ops->apply           = PCApply_ASM;
1292   pc->ops->matapply        = PCMatApply_ASM;
1293   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1294   pc->ops->setup           = PCSetUp_ASM;
1295   pc->ops->reset           = PCReset_ASM;
1296   pc->ops->destroy         = PCDestroy_ASM;
1297   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1298   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1299   pc->ops->view            = PCView_ASM;
1300   pc->ops->applyrichardson = NULL;
1301 
1302   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1303   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1304   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1305   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1306   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1307   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1310   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1311   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1312   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1313   PetscFunctionReturn(PETSC_SUCCESS);
1314 }
1315 
1316 /*@C
1317   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1318   preconditioner, `PCASM`,  for any problem on a general grid.
1319 
1320   Collective
1321 
1322   Input Parameters:
1323 + A - The global matrix operator
1324 - n - the number of local blocks
1325 
1326   Output Parameter:
1327 . outis - the array of index sets defining the subdomains
1328 
1329   Level: advanced
1330 
1331   Note:
1332   This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
1333   from these if you use `PCASMSetLocalSubdomains()`
1334 
1335   Fortran Notes:
1336   You must provide the array outis[] already allocated of length n.
1337 
1338 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1339 @*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
{
  MatPartitioning mpart;
  const char     *prefix;
  PetscInt        i, j, rstart, rend, bs;
  PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart, isnumb, *is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscAssertPointer(outis, 3);
  PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A, &prefix));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(MatGetBlockSize(A, &bs));
  /* the local row range must be aligned with the block size, otherwise the subdomains built below would split blocks */
  PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

  /* Get diagonal block from matrix if possible (a borrowed reference; it is never destroyed here) */
  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
  if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
  if (Ad) {
    /* detect (S)BAIJ storage so that one adjacency vertex can represent a whole bs x bs block */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match, done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* "current" and "square" are trivial partitioners; only a real graph partitioner is worth the adjacency build below */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
    if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt        na;
      const PetscInt *ia, *ja;
      PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
        const PetscInt *row;
        nnz = 0;
        for (i = 0; i < na; i++) { /* count number of nonzeros */
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--;
              break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na + 1, &iia));
        PetscCall(PetscMalloc1(nnz, &jja));
        nnz    = 0;
        iia[0] = 0;
        for (i = 0; i < na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i + 1] - ia[i];
          row = ja + ia[i];
          for (j = 0; j < len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz + cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i + 1] = nnz;
        }
        /* Partitioning of the adjacency matrix; MatCreateMPIAdj() takes
           ownership of iia and jja, which is why they are not freed here */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
        PetscCall(MatPartitioningSetAdjacency(mpart, adj));
        PetscCall(MatPartitioningSetNParts(mpart, n));
        PetscCall(MatPartitioningApply(mpart, &ispart));
        PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* the caller receives ownership of the array and its index sets (see PCASMDestroySubdomains()) */
  PetscCall(PetscMalloc1(n, &is));
  *outis = is;

  if (!foundpart) {
    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend - rstart) / bs;
    PetscInt start = rstart;
    for (i = 0; i < n; i++) {
      /* spread mbs blocks as evenly as possible: the first mbs%n subdomains get one extra block */
      PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
      start += count;
    }

  } else {
    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count, nidx, *indices, *newidx, start = 0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n, &count));
    PetscCall(ISPartitioningCount(ispart, n, count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i = 0; i < n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb, &nidx));
    PetscCall(PetscMalloc1(nidx, &indices));
    for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb, &numbering));
    /* after this sort, indices[] lists the local vertices grouped by subdomain number */
    PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
    PetscCall(ISRestoreIndices(isnumb, &numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block vertex into its bs consecutive point indices */
      PetscCall(PetscMalloc1(nidx * bs, &newidx));
      for (i = 0; i < nidx; i++) {
        for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i = 0; i < nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block: consecutive runs of indices[], with lengths given by count[] */
    for (i = 0; i < n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1486 
1487 /*@C
1488   PCASMDestroySubdomains - Destroys the index sets created with
1489   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1490 
1491   Collective
1492 
1493   Input Parameters:
1494 + n        - the number of index sets
1495 . is       - the array of index sets
1496 - is_local - the array of local index sets, can be `NULL`
1497 
1498   Level: advanced
1499 
1500 .seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1501 @*/
1502 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1503 {
1504   PetscInt i;
1505 
1506   PetscFunctionBegin;
1507   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1508   if (is) {
1509     PetscAssertPointer(is, 2);
1510     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1511     PetscCall(PetscFree(is));
1512   }
1513   if (is_local) {
1514     PetscAssertPointer(is_local, 3);
1515     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1516     PetscCall(PetscFree(is_local));
1517   }
1518   PetscFunctionReturn(PETSC_SUCCESS);
1519 }
1520 
1521 /*@C
1522   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1523   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1524 
1525   Not Collective
1526 
1527   Input Parameters:
1528 + m       - the number of mesh points in the x direction
1529 . n       - the number of mesh points in the y direction
1530 . M       - the number of subdomains in the x direction
1531 . N       - the number of subdomains in the y direction
1532 . dof     - degrees of freedom per node
1533 - overlap - overlap in mesh lines
1534 
1535   Output Parameters:
1536 + Nsub     - the number of subdomains created
1537 . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1538 - is_local - array of index sets defining non-overlapping subdomains
1539 
1540   Level: advanced
1541 
1542   Note:
  Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1544   preconditioners.  More general related routines are
1545   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1546 
1547   Fortran Notes:
1548   The `IS` must be declared as an array of length long enough to hold `Nsub` entries
1549 
1550 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1551           `PCASMSetOverlap()`
1552 @*/
PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
{
  PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
  PetscInt nidx, *idx, loc, ii, jj, count;

  PetscFunctionBegin;
  PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

  /* one subdomain per tile of the M x N subdomain grid */
  *Nsub = N * M;
  PetscCall(PetscMalloc1(*Nsub, is));
  PetscCall(PetscMalloc1(*Nsub, is_local));
  ystart    = 0;
  loc_outer = 0;
  for (i = 0; i < N; i++) {
    height = n / N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
    /* grow the tile by 'overlap' mesh lines on each side, clamped to the domain boundary */
    yleft = ystart - overlap;
    if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap;
    if (yright > n) yright = n;
    xstart = 0;
    for (j = 0; j < M; j++) {
      width = m / M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
      xleft = xstart - overlap;
      if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap;
      if (xright > m) xright = m;
      /* collect the natural-ordering indices (row-major: index = m*row + col) of the overlapping tile */
      nidx = (xright - xleft) * (yright - yleft);
      PetscCall(PetscMalloc1(nidx, &idx));
      loc = 0;
      for (ii = yleft; ii < yright; ii++) {
        count = m * ii + xleft;
        for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
      if (overlap == 0) {
        /* with zero overlap the local subdomain equals the overlapping one; share the IS via an extra reference */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse the idx buffer to collect the non-overlapping interior tile */
        for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
          for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* return the overlapping index sets in sorted order */
  for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1608 
1609 /*@C
1610   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1611   only) for the additive Schwarz preconditioner, `PCASM`.
1612 
1613   Not Collective
1614 
1615   Input Parameter:
1616 . pc - the preconditioner context
1617 
1618   Output Parameters:
1619 + n        - if requested, the number of subdomains for this processor (default value = 1)
1620 . is       - if requested, the index sets that define the subdomains for this processor
1621 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1622 
1623   Level: advanced
1624 
1625   Note:
1626   The `IS` numbering is in the parallel, global numbering of the vector.
1627 
1628 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1629           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1630 @*/
1631 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1632 {
1633   PC_ASM   *osm = (PC_ASM *)pc->data;
1634   PetscBool match;
1635 
1636   PetscFunctionBegin;
1637   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1638   if (n) PetscAssertPointer(n, 2);
1639   if (is) PetscAssertPointer(is, 3);
1640   if (is_local) PetscAssertPointer(is_local, 4);
1641   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1642   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1643   if (n) *n = osm->n_local_true;
1644   if (is) *is = osm->is;
1645   if (is_local) *is_local = osm->is_local;
1646   PetscFunctionReturn(PETSC_SUCCESS);
1647 }
1648 
1649 /*@C
1650   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1651   only) for the additive Schwarz preconditioner, `PCASM`.
1652 
1653   Not Collective
1654 
1655   Input Parameter:
1656 . pc - the preconditioner context
1657 
1658   Output Parameters:
1659 + n   - if requested, the number of matrices for this processor (default value = 1)
1660 - mat - if requested, the matrices
1661 
1662   Level: advanced
1663 
1664   Notes:
  Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`
1666 
1667   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1668 
1669 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1670           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1671 @*/
1672 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1673 {
1674   PC_ASM   *osm;
1675   PetscBool match;
1676 
1677   PetscFunctionBegin;
1678   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1679   if (n) PetscAssertPointer(n, 2);
1680   if (mat) PetscAssertPointer(mat, 3);
1681   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1682   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1683   if (!match) {
1684     if (n) *n = 0;
1685     if (mat) *mat = NULL;
1686   } else {
1687     osm = (PC_ASM *)pc->data;
1688     if (n) *n = osm->n_local_true;
1689     if (mat) *mat = osm->pmat;
1690   }
1691   PetscFunctionReturn(PETSC_SUCCESS);
1692 }
1693 
1694 /*@
1695   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1696 
1697   Logically Collective
1698 
1699   Input Parameters:
1700 + pc  - the preconditioner
1701 - flg - boolean indicating whether to use subdomains defined by the `DM`
1702 
1703   Options Database Key:
1704 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1705 
1706   Level: intermediate
1707 
1708   Note:
1709   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1710   so setting either of the first two effectively turns the latter off.
1711 
1712 .seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1713           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1714 @*/
1715 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1716 {
1717   PC_ASM   *osm = (PC_ASM *)pc->data;
1718   PetscBool match;
1719 
1720   PetscFunctionBegin;
1721   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1722   PetscValidLogicalCollectiveBool(pc, flg, 2);
1723   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1724   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1725   if (match) osm->dm_subdomains = flg;
1726   PetscFunctionReturn(PETSC_SUCCESS);
1727 }
1728 
1729 /*@
1730   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1731 
1732   Not Collective
1733 
1734   Input Parameter:
1735 . pc - the preconditioner
1736 
1737   Output Parameter:
1738 . flg - boolean indicating whether to use subdomains defined by the `DM`
1739 
1740   Level: intermediate
1741 
1742 .seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1743           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1744 @*/
1745 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1746 {
1747   PC_ASM   *osm = (PC_ASM *)pc->data;
1748   PetscBool match;
1749 
1750   PetscFunctionBegin;
1751   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1752   PetscAssertPointer(flg, 2);
1753   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1754   if (match) *flg = osm->dm_subdomains;
1755   else *flg = PETSC_FALSE;
1756   PetscFunctionReturn(PETSC_SUCCESS);
1757 }
1758 
1759 /*@C
1760   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1761 
1762   Not Collective
1763 
1764   Input Parameter:
1765 . pc - the `PC`
1766 
1767   Output Parameter:
1768 . sub_mat_type - name of matrix type
1769 
1770   Level: advanced
1771 
1772 .seealso: `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1773 @*/
1774 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1775 {
1776   PetscFunctionBegin;
1777   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1778   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1779   PetscFunctionReturn(PETSC_SUCCESS);
1780 }
1781 
1782 /*@C
1783   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1784 
1785   Collective
1786 
1787   Input Parameters:
1788 + pc           - the `PC` object
1789 - sub_mat_type - the `MatType`
1790 
1791   Options Database Key:
1792 . -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1793    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1794 
1795   Note:
1796   See `MatType` for available types
1797 
1798   Level: advanced
1799 
1800 .seealso: `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1801 @*/
PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatches to PCASMSetSubMatType_ASM() when pc is a PCASM; PetscTryMethod() is a no-op otherwise */
  PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1809