xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 21e3ffae2f3b73c0bd738cf6d0a809700fc04bb0)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         iascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (iascii) {
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(viewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(viewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerFlush(viewer));
69       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
70     }
71   } else if (isstring) {
72     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
73     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
74     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
75     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
76   }
77   PetscFunctionReturn(PETSC_SUCCESS);
78 }
79 
80 static PetscErrorCode PCASMPrintSubdomains(PC pc)
81 {
82   PC_ASM         *osm = (PC_ASM *)pc->data;
83   const char     *prefix;
84   char            fname[PETSC_MAX_PATH_LEN + 1];
85   PetscViewer     viewer, sviewer;
86   char           *s;
87   PetscInt        i, j, nidx;
88   const PetscInt *idx;
89   PetscMPIInt     rank, size;
90 
91   PetscFunctionBegin;
92   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
93   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
94   PetscCall(PCGetOptionsPrefix(pc, &prefix));
95   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
96   if (fname[0] == 0) PetscCall(PetscStrcpy(fname, "stdout"));
97   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
98   for (i = 0; i < osm->n_local; i++) {
99     if (i < osm->n_local_true) {
100       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
101       PetscCall(ISGetIndices(osm->is[i], &idx));
102       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
103 #define len 16 * (nidx + 1) + 512
104       PetscCall(PetscMalloc1(len, &s));
105       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
106 #undef len
107       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
108       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
109       PetscCall(ISRestoreIndices(osm->is[i], &idx));
110       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
111       PetscCall(PetscViewerDestroy(&sviewer));
112       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
113       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
114       PetscCall(PetscViewerFlush(viewer));
115       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
116       PetscCall(PetscFree(s));
117       if (osm->is_local) {
118         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
119 #define len 16 * (nidx + 1) + 512
120         PetscCall(PetscMalloc1(len, &s));
121         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
122 #undef len
123         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
124         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
125         PetscCall(ISGetIndices(osm->is_local[i], &idx));
126         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
127         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
128         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
129         PetscCall(PetscViewerDestroy(&sviewer));
130         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
131         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
132         PetscCall(PetscViewerFlush(viewer));
133         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
134         PetscCall(PetscFree(s));
135       }
136     } else {
137       /* Participate in collective viewer calls. */
138       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
139       PetscCall(PetscViewerFlush(viewer));
140       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
141       /* Assume either all ranks have is_local or none do. */
142       if (osm->is_local) {
143         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
144         PetscCall(PetscViewerFlush(viewer));
145         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
146       }
147     }
148   }
149   PetscCall(PetscViewerFlush(viewer));
150   PetscCall(PetscViewerDestroy(&viewer));
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
154 static PetscErrorCode PCSetUp_ASM(PC pc)
155 {
156   PC_ASM     *osm = (PC_ASM *)pc->data;
157   PetscBool   flg;
158   PetscInt    i, m, m_local;
159   MatReuse    scall = MAT_REUSE_MATRIX;
160   IS          isl;
161   KSP         ksp;
162   PC          subpc;
163   const char *prefix, *pprefix;
164   Vec         vec;
165   DM         *domain_dm = NULL;
166 
167   PetscFunctionBegin;
168   if (!pc->setupcalled) {
169     PetscInt m;
170 
171     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172     if (osm->n_local_true == PETSC_DECIDE) {
173       /* no subdomains given */
174       /* try pc->dm first, if allowed */
175       if (osm->dm_subdomains && pc->dm) {
176         PetscInt num_domains, d;
177         char   **domain_names;
178         IS      *inner_domain_is, *outer_domain_is;
179         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181                               A future improvement of this code might allow one to use
182                               DM-defined subdomains and also increase the overlap,
183                               but that is not currently supported */
184         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185         for (d = 0; d < num_domains; ++d) {
186           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189         }
190         PetscCall(PetscFree(domain_names));
191         PetscCall(PetscFree(inner_domain_is));
192         PetscCall(PetscFree(outer_domain_is));
193       }
194       if (osm->n_local_true == PETSC_DECIDE) {
195         /* still no subdomains; use one subdomain per processor */
196         osm->n_local_true = 1;
197       }
198     }
199     { /* determine the global and max number of subdomains */
200       struct {
201         PetscInt max, sum;
202       } inwork, outwork;
203       PetscMPIInt size;
204 
205       inwork.max = osm->n_local_true;
206       inwork.sum = osm->n_local_true;
207       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208       osm->n_local = outwork.max;
209       osm->n       = outwork.sum;
210 
211       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212       if (outwork.max == 1 && outwork.sum == size) {
213         /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
214         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215       }
216     }
217     if (!osm->is) { /* create the index sets */
218       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219     }
220     if (osm->n_local_true > 1 && !osm->is_local) {
221       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222       for (i = 0; i < osm->n_local_true; i++) {
223         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226         } else {
227           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228           osm->is_local[i] = osm->is[i];
229         }
230       }
231     }
232     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233     if (osm->overlap > 0) {
234       /* Extend the "overlapping" regions by a number of steps */
235       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236     }
237     if (osm->sort_indices) {
238       for (i = 0; i < osm->n_local_true; i++) {
239         PetscCall(ISSort(osm->is[i]));
240         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241       }
242     }
243     flg = PETSC_FALSE;
244     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246     if (!osm->ksp) {
247       /* Create the local solvers */
248       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250       for (i = 0; i < osm->n_local_true; i++) {
251         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
253         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
254         PetscCall(KSPSetType(ksp, KSPPREONLY));
255         PetscCall(KSPGetPC(ksp, &subpc));
256         PetscCall(PCGetOptionsPrefix(pc, &prefix));
257         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
258         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
259         if (domain_dm) {
260           PetscCall(KSPSetDM(ksp, domain_dm[i]));
261           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
262           PetscCall(DMDestroy(&domain_dm[i]));
263         }
264         osm->ksp[i] = ksp;
265       }
266       if (domain_dm) PetscCall(PetscFree(domain_dm));
267     }
268 
269     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
270     PetscCall(ISSortRemoveDups(osm->lis));
271     PetscCall(ISGetLocalSize(osm->lis, &m));
272 
273     scall = MAT_INITIAL_MATRIX;
274   } else {
275     /*
276        Destroy the blocks from the previous iteration
277     */
278     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
279       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
280       scall = MAT_INITIAL_MATRIX;
281     }
282   }
283 
284   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
285   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
286     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
287     scall = MAT_INITIAL_MATRIX;
288   }
289 
290   /*
291      Extract out the submatrices
292   */
293   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
294   if (scall == MAT_INITIAL_MATRIX) {
295     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
296     for (i = 0; i < osm->n_local_true; i++) { PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix)); }
297   }
298 
299   /* Convert the types of the submatrices (if need be) */
300   if (osm->sub_mat_type) {
301     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
302   }
303 
304   if (!pc->setupcalled) {
305     VecType vtype;
306 
307     /* Create the local work vectors (from the local matrices) and scatter contexts */
308     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
309 
310     PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
311     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
312     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
313     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
314     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
315 
316     PetscCall(ISGetLocalSize(osm->lis, &m));
317     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
318     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
319     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
320     PetscCall(VecSetSizes(osm->lx, m, m));
321     PetscCall(VecSetType(osm->lx, vtype));
322     PetscCall(VecDuplicate(osm->lx, &osm->ly));
323     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
324     PetscCall(ISDestroy(&isl));
325 
326     for (i = 0; i < osm->n_local_true; ++i) {
327       ISLocalToGlobalMapping ltog;
328       IS                     isll;
329       const PetscInt        *idx_is;
330       PetscInt              *idx_lis, nout;
331 
332       PetscCall(ISGetLocalSize(osm->is[i], &m));
333       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
334       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
335 
336       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
337       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
338       PetscCall(ISGetLocalSize(osm->is[i], &m));
339       PetscCall(ISGetIndices(osm->is[i], &idx_is));
340       PetscCall(PetscMalloc1(m, &idx_lis));
341       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
342       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
343       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
344       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
345       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
346       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
347       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
348       PetscCall(ISDestroy(&isll));
349       PetscCall(ISDestroy(&isl));
350       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
351         ISLocalToGlobalMapping ltog;
352         IS                     isll, isll_local;
353         const PetscInt        *idx_local;
354         PetscInt              *idx1, *idx2, nout;
355 
356         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
357         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
358 
359         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
360         PetscCall(PetscMalloc1(m_local, &idx1));
361         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
362         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
363         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
364         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
365 
366         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
367         PetscCall(PetscMalloc1(m_local, &idx2));
368         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
369         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
370         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
371         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
372 
373         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
374         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
375 
376         PetscCall(ISDestroy(&isll));
377         PetscCall(ISDestroy(&isll_local));
378       }
379     }
380     PetscCall(VecDestroy(&vec));
381   }
382 
383   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
384     IS      *cis;
385     PetscInt c;
386 
387     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
388     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
389     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
390     PetscCall(PetscFree(cis));
391   }
392 
393   /* Return control to the user so that the submatrices can be modified (e.g., to apply
394      different boundary conditions for the submatrices than for the global problem) */
395   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
396 
397   /*
398      Loop over subdomains putting them into local ksp
399   */
400   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
401   for (i = 0; i < osm->n_local_true; i++) {
402     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
403     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
404     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
405   }
406   PetscFunctionReturn(PETSC_SUCCESS);
407 }
408 
409 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
410 {
411   PC_ASM            *osm = (PC_ASM *)pc->data;
412   PetscInt           i;
413   KSPConvergedReason reason;
414 
415   PetscFunctionBegin;
416   for (i = 0; i < osm->n_local_true; i++) {
417     PetscCall(KSPSetUp(osm->ksp[i]));
418     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
419     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
420   }
421   PetscFunctionReturn(PETSC_SUCCESS);
422 }
423 
424 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
425 {
426   PC_ASM     *osm = (PC_ASM *)pc->data;
427   PetscInt    i, n_local_true = osm->n_local_true;
428   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
429 
430   PetscFunctionBegin;
431   /*
432      support for limiting the restriction or interpolation to only local
433      subdomain values (leaving the other values 0).
434   */
435   if (!(osm->type & PC_ASM_RESTRICT)) {
436     forward = SCATTER_FORWARD_LOCAL;
437     /* have to zero the work RHS since scatter may leave some slots empty */
438     PetscCall(VecSet(osm->lx, 0.0));
439   }
440   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
441 
442   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
443   /* zero the global and the local solutions */
444   PetscCall(VecSet(y, 0.0));
445   PetscCall(VecSet(osm->ly, 0.0));
446 
447   /* copy the global RHS to local RHS including the ghost nodes */
448   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
449   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
450 
451   /* restrict local RHS to the overlapping 0-block RHS */
452   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
453   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
454 
455   /* do the local solves */
456   for (i = 0; i < n_local_true; ++i) {
457     /* solve the overlapping i-block */
458     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
459     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
460     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
461     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
462 
463     if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
464       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
465       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
466     } else { /* interpolate the overlapping i-block solution to the local solution */
467       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
468       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
469     }
470 
471     if (i < n_local_true - 1) {
472       /* restrict local RHS to the overlapping (i+1)-block RHS */
473       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
474       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
475 
476       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
477         /* update the overlapping (i+1)-block RHS using the current local solution */
478         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
479         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
480       }
481     }
482   }
483   /* add the local solution to the global solution including the ghost nodes */
484   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
485   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
486   PetscFunctionReturn(PETSC_SUCCESS);
487 }
488 
489 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
490 {
491   PC_ASM     *osm = (PC_ASM *)pc->data;
492   Mat         Z, W;
493   Vec         x;
494   PetscInt    i, m, N;
495   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
496 
497   PetscFunctionBegin;
498   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
499   /*
500      support for limiting the restriction or interpolation to only local
501      subdomain values (leaving the other values 0).
502   */
503   if (!(osm->type & PC_ASM_RESTRICT)) {
504     forward = SCATTER_FORWARD_LOCAL;
505     /* have to zero the work RHS since scatter may leave some slots empty */
506     PetscCall(VecSet(osm->lx, 0.0));
507   }
508   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
509   PetscCall(VecGetLocalSize(osm->x[0], &m));
510   PetscCall(MatGetSize(X, NULL, &N));
511   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
512 
513   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
514   /* zero the global and the local solutions */
515   PetscCall(MatZeroEntries(Y));
516   PetscCall(VecSet(osm->ly, 0.0));
517 
518   for (i = 0; i < N; ++i) {
519     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
520     /* copy the global RHS to local RHS including the ghost nodes */
521     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
522     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
523     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
524 
525     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
526     /* restrict local RHS to the overlapping 0-block RHS */
527     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
528     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
529     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
530   }
531   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
532   /* solve the overlapping 0-block */
533   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
534   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
535   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
536   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
537   PetscCall(MatDestroy(&Z));
538 
539   for (i = 0; i < N; ++i) {
540     PetscCall(VecSet(osm->ly, 0.0));
541     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
542     if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
543       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
544       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
545     } else { /* interpolate the overlapping 0-block solution to the local solution */
546       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
547       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
548     }
549     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
550 
551     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
552     /* add the local solution to the global solution including the ghost nodes */
553     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
554     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
555     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
556   }
557   PetscCall(MatDestroy(&W));
558   PetscFunctionReturn(PETSC_SUCCESS);
559 }
560 
561 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
562 {
563   PC_ASM     *osm = (PC_ASM *)pc->data;
564   PetscInt    i, n_local_true = osm->n_local_true;
565   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
566 
567   PetscFunctionBegin;
568   /*
569      Support for limiting the restriction or interpolation to only local
570      subdomain values (leaving the other values 0).
571 
572      Note: these are reversed from those in PCApply_ASM() because we are applying the
573      transpose of the three terms
574   */
575 
576   if (!(osm->type & PC_ASM_INTERPOLATE)) {
577     forward = SCATTER_FORWARD_LOCAL;
578     /* have to zero the work RHS since scatter may leave some slots empty */
579     PetscCall(VecSet(osm->lx, 0.0));
580   }
581   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
582 
583   /* zero the global and the local solutions */
584   PetscCall(VecSet(y, 0.0));
585   PetscCall(VecSet(osm->ly, 0.0));
586 
587   /* Copy the global RHS to local RHS including the ghost nodes */
588   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
589   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
590 
591   /* Restrict local RHS to the overlapping 0-block RHS */
592   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
593   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
594 
595   /* do the local solves */
596   for (i = 0; i < n_local_true; ++i) {
597     /* solve the overlapping i-block */
598     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
599     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
600     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
601     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
602 
603     if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
604       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
605       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
606     } else { /* interpolate the overlapping i-block solution to the local solution */
607       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
608       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
609     }
610 
611     if (i < n_local_true - 1) {
612       /* Restrict local RHS to the overlapping (i+1)-block RHS */
613       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
614       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
615     }
616   }
617   /* Add the local solution to the global solution including the ghost nodes */
618   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
619   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
620   PetscFunctionReturn(PETSC_SUCCESS);
621 }
622 
623 static PetscErrorCode PCReset_ASM(PC pc)
624 {
625   PC_ASM  *osm = (PC_ASM *)pc->data;
626   PetscInt i;
627 
628   PetscFunctionBegin;
629   if (osm->ksp) {
630     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
631   }
632   if (osm->pmat) {
633     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
634   }
635   if (osm->lrestriction) {
636     PetscCall(VecScatterDestroy(&osm->restriction));
637     for (i = 0; i < osm->n_local_true; i++) {
638       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
639       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
640       PetscCall(VecDestroy(&osm->x[i]));
641       PetscCall(VecDestroy(&osm->y[i]));
642     }
643     PetscCall(PetscFree(osm->lrestriction));
644     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
645     PetscCall(PetscFree(osm->x));
646     PetscCall(PetscFree(osm->y));
647   }
648   PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
649   PetscCall(ISDestroy(&osm->lis));
650   PetscCall(VecDestroy(&osm->lx));
651   PetscCall(VecDestroy(&osm->ly));
652   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
653 
654   PetscCall(PetscFree(osm->sub_mat_type));
655 
656   osm->is       = NULL;
657   osm->is_local = NULL;
658   PetscFunctionReturn(PETSC_SUCCESS);
659 }
660 
661 static PetscErrorCode PCDestroy_ASM(PC pc)
662 {
663   PC_ASM  *osm = (PC_ASM *)pc->data;
664   PetscInt i;
665 
666   PetscFunctionBegin;
667   PetscCall(PCReset_ASM(pc));
668   if (osm->ksp) {
669     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
670     PetscCall(PetscFree(osm->ksp));
671   }
672   PetscCall(PetscFree(pc->data));
673 
674   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
675   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
676   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
677   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
685   PetscFunctionReturn(PETSC_SUCCESS);
686 }
687 
688 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
689 {
690   PC_ASM         *osm = (PC_ASM *)pc->data;
691   PetscInt        blocks, ovl;
692   PetscBool       flg;
693   PCASMType       asmtype;
694   PCCompositeType loctype;
695   char            sub_mat_type[256];
696 
697   PetscFunctionBegin;
698   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
699   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
700   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
701   if (flg) {
702     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
703     osm->dm_subdomains = PETSC_FALSE;
704   }
705   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
706   if (flg) {
707     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
708     osm->dm_subdomains = PETSC_FALSE;
709   }
710   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
711   if (flg) {
712     PetscCall(PCASMSetOverlap(pc, ovl));
713     osm->dm_subdomains = PETSC_FALSE;
714   }
715   flg = PETSC_FALSE;
716   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
717   if (flg) PetscCall(PCASMSetType(pc, asmtype));
718   flg = PETSC_FALSE;
719   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
720   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
721   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
722   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
723   PetscOptionsHeadEnd();
724   PetscFunctionReturn(PETSC_SUCCESS);
725 }
726 
727 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
728 {
729   PC_ASM  *osm = (PC_ASM *)pc->data;
730   PetscInt i;
731 
732   PetscFunctionBegin;
733   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
734   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
735 
736   if (!pc->setupcalled) {
737     if (is) {
738       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
739     }
740     if (is_local) {
741       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
742     }
743     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
744 
745     osm->n_local_true = n;
746     osm->is           = NULL;
747     osm->is_local     = NULL;
748     if (is) {
749       PetscCall(PetscMalloc1(n, &osm->is));
750       for (i = 0; i < n; i++) osm->is[i] = is[i];
751       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
752       osm->overlap = -1;
753     }
754     if (is_local) {
755       PetscCall(PetscMalloc1(n, &osm->is_local));
756       for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
757       if (!is) {
758         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
759         for (i = 0; i < osm->n_local_true; i++) {
760           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
761             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
762             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
763           } else {
764             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
765             osm->is[i] = osm->is_local[i];
766           }
767         }
768       }
769     }
770   }
771   PetscFunctionReturn(PETSC_SUCCESS);
772 }
773 
774 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
775 {
776   PC_ASM     *osm = (PC_ASM *)pc->data;
777   PetscMPIInt rank, size;
778   PetscInt    n;
779 
780   PetscFunctionBegin;
781   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
782   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");
783 
784   /*
785      Split the subdomains equally among all processors
786   */
787   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
788   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
789   n = N / size + ((N % size) > rank);
790   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
791   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
792   if (!pc->setupcalled) {
793     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
794 
795     osm->n_local_true = n;
796     osm->is           = NULL;
797     osm->is_local     = NULL;
798   }
799   PetscFunctionReturn(PETSC_SUCCESS);
800 }
801 
802 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
803 {
804   PC_ASM *osm = (PC_ASM *)pc->data;
805 
806   PetscFunctionBegin;
807   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
808   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
809   if (!pc->setupcalled) osm->overlap = ovl;
810   PetscFunctionReturn(PETSC_SUCCESS);
811 }
812 
813 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
814 {
815   PC_ASM *osm = (PC_ASM *)pc->data;
816 
817   PetscFunctionBegin;
818   osm->type     = type;
819   osm->type_set = PETSC_TRUE;
820   PetscFunctionReturn(PETSC_SUCCESS);
821 }
822 
823 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
824 {
825   PC_ASM *osm = (PC_ASM *)pc->data;
826 
827   PetscFunctionBegin;
828   *type = osm->type;
829   PetscFunctionReturn(PETSC_SUCCESS);
830 }
831 
832 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
833 {
834   PC_ASM *osm = (PC_ASM *)pc->data;
835 
836   PetscFunctionBegin;
837   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
838   osm->loctype = type;
839   PetscFunctionReturn(PETSC_SUCCESS);
840 }
841 
842 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
843 {
844   PC_ASM *osm = (PC_ASM *)pc->data;
845 
846   PetscFunctionBegin;
847   *type = osm->loctype;
848   PetscFunctionReturn(PETSC_SUCCESS);
849 }
850 
851 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
852 {
853   PC_ASM *osm = (PC_ASM *)pc->data;
854 
855   PetscFunctionBegin;
856   osm->sort_indices = doSort;
857   PetscFunctionReturn(PETSC_SUCCESS);
858 }
859 
860 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
861 {
862   PC_ASM *osm = (PC_ASM *)pc->data;
863 
864   PetscFunctionBegin;
865   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
866 
867   if (n_local) *n_local = osm->n_local_true;
868   if (first_local) {
869     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
870     *first_local -= osm->n_local_true;
871   }
872   if (ksp) *ksp = osm->ksp;
873   PetscFunctionReturn(PETSC_SUCCESS);
874 }
875 
876 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
877 {
878   PC_ASM *osm = (PC_ASM *)pc->data;
879 
880   PetscFunctionBegin;
881   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
882   PetscValidPointer(sub_mat_type, 2);
883   *sub_mat_type = osm->sub_mat_type;
884   PetscFunctionReturn(PETSC_SUCCESS);
885 }
886 
887 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
888 {
889   PC_ASM *osm = (PC_ASM *)pc->data;
890 
891   PetscFunctionBegin;
892   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
893   PetscCall(PetscFree(osm->sub_mat_type));
894   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
895   PetscFunctionReturn(PETSC_SUCCESS);
896 }
897 
898 /*@C
899     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
900 
901     Collective
902 
903     Input Parameters:
904 +   pc - the preconditioner context
905 .   n - the number of subdomains for this processor (default value = 1)
906 .   is - the index set that defines the subdomains for this processor
907          (or NULL for PETSc to determine subdomains)
908 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
909          (or NULL to not provide these)
910 
911     Options Database Key:
912     To set the total number of subdomain blocks rather than specify the
913     index sets, use the option
914 .    -pc_asm_local_blocks <blks> - Sets local blocks
915 
916     Notes:
917     The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
918 
919     By default the `PCASM` preconditioner uses 1 block per processor.
920 
921     Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
922 
923     If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
924     back to form the global solution (this is the standard restricted additive Schwarz method)
925 
926     If is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
927     no code to handle that case.
928 
929     Level: advanced
930 
931 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
932           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
933 @*/
934 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
935 {
936   PetscFunctionBegin;
937   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
938   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
939   PetscFunctionReturn(PETSC_SUCCESS);
940 }
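
/*
   Usage sketch (not part of this file; the even split below is illustrative): defining two
   local subdomains from user-built index sets before PCSetUp(). The variables rstart and
   nloc are assumed to come from MatGetOwnershipRange() on the preconditioning matrix.

     IS is[2];

     PetscCall(ISCreateStride(PETSC_COMM_SELF, nloc / 2, rstart, 1, &is[0]));                   // first half of the owned rows, global numbering
     PetscCall(ISCreateStride(PETSC_COMM_SELF, nloc - nloc / 2, rstart + nloc / 2, 1, &is[1])); // remaining owned rows
     PetscCall(PCASMSetLocalSubdomains(pc, 2, is, NULL));                                       // PCASM takes its own references to the IS
     PetscCall(ISDestroy(&is[0]));
     PetscCall(ISDestroy(&is[1]));
*/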
941 
942 /*@C
943     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
944     additive Schwarz preconditioner, `PCASM`.
945 
946     Collective, all MPI ranks must pass in the same array of `IS`
947 
948     Input Parameters:
949 +   pc - the preconditioner context
950 .   N  - the number of subdomains for all processors
951 .   is - the index sets that define the subdomains for all processors
952          (or NULL to ask PETSc to determine the subdomains)
953 -   is_local - the index sets that define the local part of the subdomains for this processor
954          (or NULL to not provide this information)
955 
956     Options Database Key:
957     To set the total number of subdomain blocks rather than specify the
958     index sets, use the option
959 .    -pc_asm_blocks <blks> - Sets total blocks
960 
961     Notes:
962     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
963 
964     By default the `PCASM` preconditioner uses 1 block per processor.
965 
966     These index sets cannot be destroyed until after completion of the
967     linear solves for which the `PCASM` preconditioner is being used.
968 
969     Use `PCASMSetLocalSubdomains()` to set local subdomains.
970 
971     The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
972 
973     Level: advanced
974 
975 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
976           `PCASMCreateSubdomains2D()`, `PCGASM`
977 @*/
978 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
979 {
980   PetscFunctionBegin;
981   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
982   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
983   PetscFunctionReturn(PETSC_SUCCESS);
984 }
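
/*
   Usage sketch (illustrative): requesting 16 subdomain blocks in total, split evenly across
   the ranks by the routine above; equivalent to the option -pc_asm_blocks 16. The index sets
   themselves cannot currently be passed here, so both IS arguments must be NULL.

     PetscCall(PCASMSetTotalSubdomains(pc, 16, NULL, NULL));
*/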
985 
986 /*@
987     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
988     additive Schwarz preconditioner, `PCASM`.
989 
990     Logically Collective
991 
992     Input Parameters:
993 +   pc  - the preconditioner context
994 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
995 
996     Options Database Key:
997 .   -pc_asm_overlap <ovl> - Sets overlap
998 
999     Notes:
1000     By default the `PCASM` preconditioner uses 1 block per processor.  To use
1001     multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1002     `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1003 
1004     The overlap defaults to 1, so if one desires that no additional
1005     overlap be computed beyond what may have been set with a call to
1006     `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1007     must be set to 0.  In particular, if one does not explicitly set
1008     the subdomains in an application code, then all overlap would be computed
1009     internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1010     variant that is equivalent to the block Jacobi preconditioner.
1011 
1012     The default algorithm used by PETSc to increase overlap is fast, but not scalable;
1013     use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1014 
1015     Note that one can define initial index sets with any overlap via
1016     `PCASMSetLocalSubdomains()`; the routine
1017     `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1018     if desired.
1019 
1020     Level: intermediate
1021 
1022 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1023           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1024 @*/
1025 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1026 {
1027   PetscFunctionBegin;
1028   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1029   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1030   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1031   PetscFunctionReturn(PETSC_SUCCESS);
1032 }
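
/*
   Usage sketch (illustrative): keep the default one-subdomain-per-rank decomposition but let
   MatIncreaseOverlap() extend each subdomain by two layers of connectivity, the same effect
   as the option -pc_asm_overlap 2.

     PetscCall(PCASMSetOverlap(pc, 2));
*/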
1033 
1034 /*@
1035     PCASMSetType - Sets the type of restriction and interpolation used
1036     for local problems in the additive Schwarz method, `PCASM`.
1037 
1038     Logically Collective
1039 
1040     Input Parameters:
1041 +   pc  - the preconditioner context
1042 -   type - variant of `PCASM`, one of
1043 .vb
1044       PC_ASM_BASIC       - full interpolation and restriction
1045       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1046       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1047       PC_ASM_NONE        - local processor restriction and interpolation
1048 .ve
1049 
1050     Options Database Key:
1051 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1052 
1053     Note:
1054     If the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1055     to limit the local processor interpolation
1056 
1057     Level: intermediate
1058 
1059 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1060           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1061 @*/
1062 PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1063 {
1064   PetscFunctionBegin;
1065   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1066   PetscValidLogicalCollectiveEnum(pc, type, 2);
1067   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1068   PetscFunctionReturn(PETSC_SUCCESS);
1069 }
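
/*
   Usage sketch (illustrative): switch from the default restricted variant to classical
   additive Schwarz, which uses full restriction and full interpolation.

     PetscCall(PCASMSetType(pc, PC_ASM_BASIC));
*/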
1070 
1071 /*@
1072     PCASMGetType - Gets the type of restriction and interpolation used
1073     for local problems in the additive Schwarz method, `PCASM`.
1074 
1075     Logically Collective
1076 
1077     Input Parameter:
1078 .   pc  - the preconditioner context
1079 
1080     Output Parameter:
1081 .   type - variant of `PCASM`, one of
1082 
1083 .vb
1084       PC_ASM_BASIC       - full interpolation and restriction
1085       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1086       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1087       PC_ASM_NONE        - local processor restriction and interpolation
1088 .ve
1089 
1090     Options Database Key:
1091 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1092 
1093     Level: intermediate
1094 
1095 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1096           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1097 @*/
1098 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1099 {
1100   PetscFunctionBegin;
1101   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1102   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1103   PetscFunctionReturn(PETSC_SUCCESS);
1104 }
1105 
1106 /*@
1107   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1108 
1109   Logically Collective
1110 
1111   Input Parameters:
1112 + pc  - the preconditioner context
1113 - type - type of composition, one of
1114 .vb
1115   PC_COMPOSITE_ADDITIVE       - local additive combination
1116   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1117 .ve
1118 
1119   Options Database Key:
1120 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1121 
1122   Level: intermediate
1123 
1124 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1125 @*/
1126 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1127 {
1128   PetscFunctionBegin;
1129   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1130   PetscValidLogicalCollectiveEnum(pc, type, 2);
1131   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1132   PetscFunctionReturn(PETSC_SUCCESS);
1133 }
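
/*
   Usage sketch (illustrative): compose the local block solves multiplicatively, so that in
   PCApply_ASM() each block after the first sees a right-hand side updated with the solutions
   of the previous blocks.

     PetscCall(PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE));
*/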
1134 
1135 /*@
1136   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1137 
1138   Logically Collective
1139 
1140   Input Parameter:
1141 . pc  - the preconditioner context
1142 
1143   Output Parameter:
1144 . type - type of composition, one of
1145 .vb
1146   PC_COMPOSITE_ADDITIVE       - local additive combination
1147   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1148 .ve
1149 
1150   Options Database Key:
1151 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1152 
1153   Level: intermediate
1154 
1155 .seealso: `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1156 @*/
1157 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1158 {
1159   PetscFunctionBegin;
1160   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1161   PetscValidPointer(type, 2);
1162   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1163   PetscFunctionReturn(PETSC_SUCCESS);
1164 }
1165 
1166 /*@
1167     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1168 
1169     Logically Collective
1170 
1171     Input Parameters:
1172 +   pc  - the preconditioner context
1173 -   doSort - sort the subdomain indices
1174 
1175     Level: intermediate
1176 
1177 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1178           `PCASMCreateSubdomains2D()`
1179 @*/
1180 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1181 {
1182   PetscFunctionBegin;
1183   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1184   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1185   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1186   PetscFunctionReturn(PETSC_SUCCESS);
1187 }
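
/*
   Usage sketch (illustrative): keep user-provided subdomain index sets in their original,
   possibly unsorted, order instead of letting PCSetUp_ASM() sort them.

     PetscCall(PCASMSetSortIndices(pc, PETSC_FALSE));
*/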
1188 
1189 /*@C
1190    PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1191    this processor.
1192 
1193    Collective iff first_local is requested
1194 
1195    Input Parameter:
1196 .  pc - the preconditioner context
1197 
1198    Output Parameters:
1199 +  n_local - the number of blocks on this processor or NULL
1200 .  first_local - the global number of the first block on this processor, or NULL;
1201                  either all processes must request this value or all must pass NULL
1202 -  ksp - the array of `KSP` contexts
1203 
1204    Notes:
1205    The array of `KSP` contexts returned by `PCASMGetSubKSP()` is owned by the `PC` and must not be freed by the caller.
1206 
1207    You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1208 
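   Example Usage:
   A minimal sketch; assumes `ksp` is a set-up `KSP` whose `PC` is of type `PCASM`:
.vb
   KSP      *subksp;
   PC        pc, subpc;
   PetscInt  nlocal, first, i;

   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCASMGetSubKSP(pc, &nlocal, &first, &subksp));
   for (i = 0; i < nlocal; i++) {
     PetscCall(KSPGetPC(subksp[i], &subpc));
     PetscCall(PCSetType(subpc, PCLU)); /* use a direct solver on each block */
   }
.ve
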
1209    Fortran Note:
1210    The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1211 
1212    Level: advanced
1213 
1214 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1215           `PCASMCreateSubdomains2D()`
1216 @*/
1217 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1218 {
1219   PetscFunctionBegin;
1220   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1221   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1222   PetscFunctionReturn(PETSC_SUCCESS);
1223 }
1224 
1225 /*MC
1226    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1227            its own `KSP` object.
1228 
1229    Options Database Keys:
1230 +  -pc_asm_blocks <blks> - Sets total blocks. Defaults to one block per MPI rank.
1231 .  -pc_asm_overlap <ovl> - Sets overlap
1232 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1233 -  -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1234 
1235    Level: beginner
1236 
1237    Notes:
1238    Running with, for example, 3 blocks on 1 processor versus 3 blocks on 3 processors gives
1239    a different convergence rate because of the default option -pc_asm_type restrict. Use
1240    -pc_asm_type basic to get the same convergence behavior in both cases.
1241 
1242    Each processor can have one or more blocks, but a block cannot be shared by more
1243    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1244 
1245    To set options on the solvers for all blocks, prepend -sub_ to the `KSP` and `PC`
1246    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1247 
1248    To set options on the solvers separately for each block, call `PCASMGetSubKSP()`
1249    and set the options directly on the resulting `KSP` objects (you can access each block's `PC` with `KSPGetPC()`).
1250 
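   Example Usage:
   A minimal sketch of setting up a `KSP` with `PCASM`; assumes `A`, `b`, and `x` are an assembled `Mat` and `Vec`s:
.vb
   KSP ksp;
   PC  pc;

   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
   PetscCall(KSPSetOperators(ksp, A, A));
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCASM));
   PetscCall(PCASMSetOverlap(pc, 2));
   PetscCall(KSPSetFromOptions(ksp)); /* picks up -sub_ksp_ and -sub_pc_ options for the blocks */
   PetscCall(KSPSolve(ksp, b, x));
   PetscCall(KSPDestroy(&ksp));
.ve
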
1251     References:
1252 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1253      Courant Institute, New York University Technical report
1254 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1255     Cambridge University Press.
1256 
1257 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1258           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1259           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`
1260 M*/
1261 
1262 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1263 {
1264   PC_ASM *osm;
1265 
1266   PetscFunctionBegin;
1267   PetscCall(PetscNew(&osm));
1268 
1269   osm->n             = PETSC_DECIDE;
1270   osm->n_local       = 0;
1271   osm->n_local_true  = PETSC_DECIDE;
1272   osm->overlap       = 1;
1273   osm->ksp           = NULL;
1274   osm->restriction   = NULL;
1275   osm->lprolongation = NULL;
1276   osm->lrestriction  = NULL;
1277   osm->x             = NULL;
1278   osm->y             = NULL;
1279   osm->is            = NULL;
1280   osm->is_local      = NULL;
1281   osm->mat           = NULL;
1282   osm->pmat          = NULL;
1283   osm->type          = PC_ASM_RESTRICT;
1284   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1285   osm->sort_indices  = PETSC_TRUE;
1286   osm->dm_subdomains = PETSC_FALSE;
1287   osm->sub_mat_type  = NULL;
1288 
1289   pc->data                 = (void *)osm;
1290   pc->ops->apply           = PCApply_ASM;
1291   pc->ops->matapply        = PCMatApply_ASM;
1292   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1293   pc->ops->setup           = PCSetUp_ASM;
1294   pc->ops->reset           = PCReset_ASM;
1295   pc->ops->destroy         = PCDestroy_ASM;
1296   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1297   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1298   pc->ops->view            = PCView_ASM;
1299   pc->ops->applyrichardson = NULL;
1300 
1301   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1302   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1303   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1304   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1305   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1306   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1307   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1310   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1311   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1312   PetscFunctionReturn(PETSC_SUCCESS);
1313 }
1314 
1315 /*@C
1316    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1317    preconditioner, `PCASM`,  for any problem on a general grid.
1318 
1319    Collective
1320 
1321    Input Parameters:
1322 +  A - The global matrix operator
1323 -  n - the number of local blocks
1324 
1325    Output Parameter:
1326 .  outis - the array of index sets defining the subdomains
1327 
1328    Level: advanced
1329 
1330    Note:
1331    This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
1332    from these if you use `PCASMSetLocalSubdomains()`
1333 
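   Example Usage:
   A minimal sketch; assumes `A` is the assembled operator and `pc` is a `PCASM` preconditioner built from it:
.vb
   IS      *is;
   PetscInt nsub = 4;

   PetscCall(PCASMCreateSubdomains(A, nsub, &is));
   PetscCall(PCASMSetLocalSubdomains(pc, nsub, is, NULL));
   PetscCall(PCASMDestroySubdomains(nsub, is, NULL)); /* the PC keeps its own references */
.ve
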
1334    Fortran Note:
1335    The array outis[] must be allocated by the caller with length n.
1336 
1337 .seealso: `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1338 @*/
1339 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1340 {
1341   MatPartitioning mpart;
1342   const char     *prefix;
1343   PetscInt        i, j, rstart, rend, bs;
1344   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1345   Mat             Ad = NULL, adj;
1346   IS              ispart, isnumb, *is;
1347 
1348   PetscFunctionBegin;
1349   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1350   PetscValidPointer(outis, 3);
1351   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);
1352 
1353   /* Get prefix, row distribution, and block size */
1354   PetscCall(MatGetOptionsPrefix(A, &prefix));
1355   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1356   PetscCall(MatGetBlockSize(A, &bs));
1357   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1358 
1359   /* Get diagonal block from matrix if possible */
1360   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1361   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1362   if (Ad) {
1363     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1364     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1365   }
1366   if (Ad && n > 1) {
1367     PetscBool match, done;
1368     /* Try to setup a good matrix partitioning if available */
1369     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1370     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1371     PetscCall(MatPartitioningSetFromOptions(mpart));
1372     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1373     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1374     if (!match) { /* assume a "good" partitioner is available */
1375       PetscInt        na;
1376       const PetscInt *ia, *ja;
1377       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1378       if (done) {
1379         /* Build adjacency matrix by hand. Unfortunately a call to
1380            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1381            remove the block-aij structure and we cannot expect
1382            MatPartitioning to split vertices as we need */
1383         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1384         const PetscInt *row;
1385         nnz = 0;
1386         for (i = 0; i < na; i++) { /* count number of nonzeros */
1387           len = ia[i + 1] - ia[i];
1388           row = ja + ia[i];
1389           for (j = 0; j < len; j++) {
1390             if (row[j] == i) { /* don't count diagonal */
1391               len--;
1392               break;
1393             }
1394           }
1395           nnz += len;
1396         }
1397         PetscCall(PetscMalloc1(na + 1, &iia));
1398         PetscCall(PetscMalloc1(nnz, &jja));
1399         nnz    = 0;
1400         iia[0] = 0;
1401         for (i = 0; i < na; i++) { /* fill adjacency */
1402           cnt = 0;
1403           len = ia[i + 1] - ia[i];
1404           row = ja + ia[i];
1405           for (j = 0; j < len; j++) {
1406             if (row[j] != i) { /* if not diagonal */
1407               jja[nnz + cnt++] = row[j];
1408             }
1409           }
1410           nnz += cnt;
1411           iia[i + 1] = nnz;
1412         }
1413         /* Partitioning of the adjacency matrix */
1414         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1415         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1416         PetscCall(MatPartitioningSetNParts(mpart, n));
1417         PetscCall(MatPartitioningApply(mpart, &ispart));
1418         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1419         PetscCall(MatDestroy(&adj));
1420         foundpart = PETSC_TRUE;
1421       }
1422       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1423     }
1424     PetscCall(MatPartitioningDestroy(&mpart));
1425   }
1426 
1427   PetscCall(PetscMalloc1(n, &is));
1428   *outis = is;
1429 
1430   if (!foundpart) {
1431     /* Partitioning by contiguous chunks of rows */
1432 
1433     PetscInt mbs   = (rend - rstart) / bs;
1434     PetscInt start = rstart;
1435     for (i = 0; i < n; i++) {
1436       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1437       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1438       start += count;
1439     }
1440 
1441   } else {
1442     /* Partitioning by adjacency of diagonal block  */
1443 
1444     const PetscInt *numbering;
1445     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1446     /* Get node count in each partition */
1447     PetscCall(PetscMalloc1(n, &count));
1448     PetscCall(ISPartitioningCount(ispart, n, count));
1449     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1450       for (i = 0; i < n; i++) count[i] *= bs;
1451     }
1452     /* Build indices from node numbering */
1453     PetscCall(ISGetLocalSize(isnumb, &nidx));
1454     PetscCall(PetscMalloc1(nidx, &indices));
1455     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1456     PetscCall(ISGetIndices(isnumb, &numbering));
1457     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1458     PetscCall(ISRestoreIndices(isnumb, &numbering));
1459     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1460       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1461       for (i = 0; i < nidx; i++) {
1462         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1463       }
1464       PetscCall(PetscFree(indices));
1465       nidx *= bs;
1466       indices = newidx;
1467     }
1468     /* Shift to get global indices */
1469     for (i = 0; i < nidx; i++) indices[i] += rstart;
1470 
1471     /* Build the index sets for each block */
1472     for (i = 0; i < n; i++) {
1473       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1474       PetscCall(ISSort(is[i]));
1475       start += count[i];
1476     }
1477 
1478     PetscCall(PetscFree(count));
1479     PetscCall(PetscFree(indices));
1480     PetscCall(ISDestroy(&isnumb));
1481     PetscCall(ISDestroy(&ispart));
1482   }
1483   PetscFunctionReturn(PETSC_SUCCESS);
1484 }
1485 
1486 /*@C
1487    PCASMDestroySubdomains - Destroys the index sets created with
1488    `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1489 
1490    Collective
1491 
1492    Input Parameters:
1493 +  n - the number of index sets
1494 .  is - the array of index sets
1495 -  is_local - the array of local index sets, can be NULL
1496 
1497    Level: advanced
1498 
1499 .seealso: `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1500 @*/
1501 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1502 {
1503   PetscInt i;
1504 
1505   PetscFunctionBegin;
1506   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1507   if (is) {
1508     PetscValidPointer(is, 2);
1509     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1510     PetscCall(PetscFree(is));
1511   }
1512   if (is_local) {
1513     PetscValidPointer(is_local, 3);
1514     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1515     PetscCall(PetscFree(is_local));
1516   }
1517   PetscFunctionReturn(PETSC_SUCCESS);
1518 }
1519 
1520 /*@
1521    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1522    preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1523 
1524    Not Collective
1525 
1526    Input Parameters:
1527 +  m   - the number of mesh points in the x direction
1528 .  n   - the number of mesh points in the y direction
1529 .  M   - the number of subdomains in the x direction
1530 .  N   - the number of subdomains in the y direction
1531 .  dof - degrees of freedom per node
1532 -  overlap - overlap in mesh lines
1533 
1534    Output Parameters:
1535 +  Nsub - the number of subdomains created
1536 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1537 -  is_local - array of index sets defining non-overlapping subdomains
1538 
1539    Note:
1540    Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1541    preconditioners.  More general related routines are
1542    `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1543 
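   Example Usage:
   A minimal sketch for a 64x64 grid split into 4x4 subdomains with overlap 1; assumes `pc` is a sequential `PCASM` preconditioner:
.vb
   IS      *is, *is_local;
   PetscInt Nsub;

   PetscCall(PCASMCreateSubdomains2D(64, 64, 4, 4, 1, 1, &Nsub, &is, &is_local));
   PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
.ve
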
1544    Level: advanced
1545 
1546 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1547           `PCASMSetOverlap()`
1548 @*/
1549 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
1550 {
1551   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1552   PetscInt nidx, *idx, loc, ii, jj, count;
1553 
1554   PetscFunctionBegin;
1555   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");
1556 
1557   *Nsub = N * M;
1558   PetscCall(PetscMalloc1(*Nsub, is));
1559   PetscCall(PetscMalloc1(*Nsub, is_local));
1560   ystart    = 0;
1561   loc_outer = 0;
1562   for (i = 0; i < N; i++) {
1563     height = n / N + ((n % N) > i); /* height of subdomain */
1564     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1565     yleft = ystart - overlap;
1566     if (yleft < 0) yleft = 0;
1567     yright = ystart + height + overlap;
1568     if (yright > n) yright = n;
1569     xstart = 0;
1570     for (j = 0; j < M; j++) {
1571       width = m / M + ((m % M) > j); /* width of subdomain */
1572       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1573       xleft = xstart - overlap;
1574       if (xleft < 0) xleft = 0;
1575       xright = xstart + width + overlap;
1576       if (xright > m) xright = m;
1577       nidx = (xright - xleft) * (yright - yleft);
1578       PetscCall(PetscMalloc1(nidx, &idx));
1579       loc = 0;
1580       for (ii = yleft; ii < yright; ii++) {
1581         count = m * ii + xleft;
1582         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1583       }
1584       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1585       if (overlap == 0) {
1586         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));
1587 
1588         (*is_local)[loc_outer] = (*is)[loc_outer];
1589       } else {
1590         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1591           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1592         }
1593         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1594       }
1595       PetscCall(PetscFree(idx));
1596       xstart += width;
1597       loc_outer++;
1598     }
1599     ystart += height;
1600   }
1601   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1602   PetscFunctionReturn(PETSC_SUCCESS);
1603 }
1604 
1605 /*@C
1606     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1607     only) for the additive Schwarz preconditioner, `PCASM`.
1608 
1609     Not Collective
1610 
1611     Input Parameter:
1612 .   pc - the preconditioner context
1613 
1614     Output Parameters:
1615 +   n - if requested, the number of subdomains for this processor (default value = 1)
1616 .   is - if requested, the index sets that define the subdomains for this processor
1617 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1618 
1619     Note:
1620     The `IS` numbering is in the parallel, global numbering of the vector.
1621 
1622     Level: advanced
1623 
1624 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1625           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1626 @*/
1627 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1628 {
1629   PC_ASM   *osm = (PC_ASM *)pc->data;
1630   PetscBool match;
1631 
1632   PetscFunctionBegin;
1633   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1634   if (n) PetscValidIntPointer(n, 2);
1635   if (is) PetscValidPointer(is, 3);
1636   if (is_local) PetscValidPointer(is_local, 4);
1637   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1638   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1639   if (n) *n = osm->n_local_true;
1640   if (is) *is = osm->is;
1641   if (is_local) *is_local = osm->is_local;
1642   PetscFunctionReturn(PETSC_SUCCESS);
1643 }
1644 
1645 /*@C
1646     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1647     only) for the additive Schwarz preconditioner, `PCASM`.
1648 
1649     Not Collective
1650 
1651     Input Parameter:
1652 .   pc - the preconditioner context
1653 
1654     Output Parameters:
1655 +   n - if requested, the number of matrices for this processor (default value = 1)
1656 -   mat - if requested, the matrices
1657 
1658     Level: advanced
1659 
1660     Notes:
1661    Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and `PCSetUpOnBlocks()`.
1662 
1663     Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1664 
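    Example Usage:
    A minimal sketch; assumes `pc` is a `PCASM` preconditioner on which `PCSetUp()` (or `KSPSetUp()`) has already been called:
.vb
    Mat     *mats;
    PetscInt nmat;

    PetscCall(PCASMGetLocalSubmatrices(pc, &nmat, &mats));
    if (nmat) PetscCall(MatView(mats[0], PETSC_VIEWER_STDOUT_SELF));
.ve
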
1665 .seealso: `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1666           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1667 @*/
1668 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1669 {
1670   PC_ASM   *osm;
1671   PetscBool match;
1672 
1673   PetscFunctionBegin;
1674   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1675   if (n) PetscValidIntPointer(n, 2);
1676   if (mat) PetscValidPointer(mat, 3);
1677   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1678   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1679   if (!match) {
1680     if (n) *n = 0;
1681     if (mat) *mat = NULL;
1682   } else {
1683     osm = (PC_ASM *)pc->data;
1684     if (n) *n = osm->n_local_true;
1685     if (mat) *mat = osm->pmat;
1686   }
1687   PetscFunctionReturn(PETSC_SUCCESS);
1688 }
1689 
1690 /*@
1691     PCASMSetDMSubdomains - Sets whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1692 
1693     Logically Collective
1694 
1695     Input Parameters:
1696 +   pc  - the preconditioner
1697 -   flg - boolean indicating whether to use subdomains defined by the `DM`
1698 
1699     Options Database Key:
1700 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1701 
1702     Level: intermediate
1703 
1704     Note:
1705     `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1706     so setting either of the first two effectively turns the latter off.
1707 
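    Example Usage:
    A minimal sketch; assumes `ksp` has a `DM` attached (e.g. via `KSPSetDM()`) that supports `DMCreateDomainDecomposition()`:
.vb
    PC pc;

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCASM));
    PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE));
.ve
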
1708 .seealso: `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1709           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1710 @*/
1711 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1712 {
1713   PC_ASM   *osm = (PC_ASM *)pc->data;
1714   PetscBool match;
1715 
1716   PetscFunctionBegin;
1717   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1718   PetscValidLogicalCollectiveBool(pc, flg, 2);
1719   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1720   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1721   if (match) osm->dm_subdomains = flg;
1722   PetscFunctionReturn(PETSC_SUCCESS);
1723 }
1724 
1725 /*@
1726     PCASMGetDMSubdomains - Returns the flag indicating whether `DMCreateDomainDecomposition()` is used to define the subdomains, whenever possible.
1727 
1728     Not Collective
1729 
1730     Input Parameter:
1731 .   pc  - the preconditioner
1732 
1733     Output Parameter:
1734 .   flg - boolean indicating whether to use subdomains defined by the DM
1735 
1736     Level: intermediate
1737 
1738 .seealso: `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1739           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1740 @*/
1741 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1742 {
1743   PC_ASM   *osm = (PC_ASM *)pc->data;
1744   PetscBool match;
1745 
1746   PetscFunctionBegin;
1747   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1748   PetscValidBoolPointer(flg, 2);
1749   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1750   if (match) *flg = osm->dm_subdomains;
1751   else *flg = PETSC_FALSE;
1752   PetscFunctionReturn(PETSC_SUCCESS);
1753 }
1754 
1755 /*@
1756      PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1757 
1758    Not Collective
1759 
1760    Input Parameter:
1761 .  pc - the `PC`
1762 
1763    Output Parameter:
1764 .  sub_mat_type - name of the matrix type used for the subsolves
1765 
1766    Level: advanced
1767 
1768 .seealso: `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1769 @*/
1770 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1771 {
1772   PetscFunctionBegin;
1773   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1774   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1775   PetscFunctionReturn(PETSC_SUCCESS);
1776 }
1777 
1778 /*@
1779      PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves
1780 
1781    Collective
1782 
1783    Input Parameters:
1784 +  pc             - the `PC` object
1785 -  sub_mat_type   - the `MatType`
1786 
1787    Options Database Key:
1788 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1789    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1790 
1791    Note:
1792    See "${PETSC_DIR}/include/petscmat.h" for available types
1793 
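   Example Usage:
   A minimal sketch; assumes `pc` is a `PCASM` preconditioner and PETSc was configured with ViennaCL (so `MATSEQAIJVIENNACL` is available):
.vb
   PetscCall(PCASMSetSubMatType(pc, MATSEQAIJVIENNACL));
.ve
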
1794   Level: advanced
1795 
1796 .seealso: `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1797 @*/
1798 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1799 {
1800   PetscFunctionBegin;
1801   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1802   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1803   PetscFunctionReturn(PETSC_SUCCESS);
1804 }
1805