xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 4ad8454beace47809662cdae21ee081016eaa39a)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. However, in order to
5   deal easily with the VecScatter() operations, we treat each processor as if it had the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10       n_local - maximum over all processors of n_local_true
11 */
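
/*
   For example, with 3 processes owning 2, 1, and 3 subdomains respectively:
   n = 6, n_local_true is 2, 1, and 3 on the respective processes, and
   n_local = 3 on every process.
*/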
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include <petsc/private/matimpl.h>
15 
16 static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
17 {
18   PC_ASM           *osm = (PC_ASM *)pc->data;
19   PetscMPIInt       rank;
20   PetscInt          i, bsz;
21   PetscBool         iascii, isstring;
22   PetscViewer       sviewer;
23   PetscViewerFormat format;
24   const char       *prefix;
25 
26   PetscFunctionBegin;
27   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
28   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
29   if (iascii) {
30     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
31     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
32     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
33     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
34     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
35     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
36     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
37     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
38     PetscCall(PetscViewerGetFormat(viewer, &format));
39     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
40       if (osm->ksp) {
41         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for the first block is in the following KSP and PC objects on rank 0:\n"));
42         PetscCall(PCGetOptionsPrefix(pc, &prefix));
43         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
44         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
45         if (rank == 0) {
46           PetscCall(PetscViewerASCIIPushTab(sviewer));
47           PetscCall(KSPView(osm->ksp[0], sviewer));
48           PetscCall(PetscViewerASCIIPopTab(sviewer));
49         }
50         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
51       }
52     } else {
53       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
54       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
55       PetscCall(PetscViewerFlush(viewer));
56       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
57       PetscCall(PetscViewerASCIIPushTab(viewer));
58       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
59       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
60       for (i = 0; i < osm->n_local_true; i++) {
61         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
62         PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
63         PetscCall(KSPView(osm->ksp[i], sviewer));
64         PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
65       }
66       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
67       PetscCall(PetscViewerASCIIPopTab(viewer));
68       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
69     }
70   } else if (isstring) {
71     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
72     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
73     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
74     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
75   }
76   PetscFunctionReturn(PETSC_SUCCESS);
77 }
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM *)pc->data;
82   const char     *prefix;
83   char            fname[PETSC_MAX_PATH_LEN + 1];
84   PetscViewer     viewer, sviewer;
85   char           *s;
86   PetscInt        i, j, nidx;
87   const PetscInt *idx;
88   PetscMPIInt     rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc, &prefix));
94   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
95   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
97   for (i = 0; i < osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100       PetscCall(ISGetIndices(osm->is[i], &idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 chars for the header. */
102 #define len 16 * (nidx + 1) + 512
103       PetscCall(PetscMalloc1(len, &s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110       PetscCall(PetscViewerDestroy(&sviewer));
111       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113       PetscCall(PetscViewerFlush(viewer));
114       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115       PetscCall(PetscFree(s));
116       if (osm->is_local) {
117         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
118 #define len 16 * (nidx + 1) + 512
119         PetscCall(PetscMalloc1(len, &s));
120         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121 #undef len
122         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128         PetscCall(PetscViewerDestroy(&sviewer));
129         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131         PetscCall(PetscViewerFlush(viewer));
132         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133         PetscCall(PetscFree(s));
134       }
135     } else {
136       /* Participate in collective viewer calls. */
137       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138       PetscCall(PetscViewerFlush(viewer));
139       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140       /* Assume either all ranks have is_local or none do. */
141       if (osm->is_local) {
142         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143         PetscCall(PetscViewerFlush(viewer));
144         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       }
146     }
147   }
148   PetscCall(PetscViewerFlush(viewer));
149   PetscCall(PetscViewerDestroy(&viewer));
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 static PetscErrorCode PCSetUp_ASM(PC pc)
154 {
155   PC_ASM       *osm = (PC_ASM *)pc->data;
156   PetscBool     flg;
157   PetscInt      i, m, m_local;
158   MatReuse      scall = MAT_REUSE_MATRIX;
159   IS            isl;
160   KSP           ksp;
161   PC            subpc;
162   const char   *prefix, *pprefix;
163   Vec           vec;
164   DM           *domain_dm = NULL;
165   MatNullSpace *nullsp    = NULL;
166 
167   PetscFunctionBegin;
168   if (!pc->setupcalled) {
169     PetscInt m;
170 
171     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172     if (osm->n_local_true == PETSC_DECIDE) {
173       /* no subdomains given */
174       /* try pc->dm first, if allowed */
175       if (osm->dm_subdomains && pc->dm) {
176         PetscInt num_domains, d;
177         char   **domain_names;
178         IS      *inner_domain_is, *outer_domain_is;
179         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181                               A future improvement of this code might allow one to use
182                               DM-defined subdomains and also increase the overlap,
183                               but that is not currently supported */
184         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185         for (d = 0; d < num_domains; ++d) {
186           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189         }
190         PetscCall(PetscFree(domain_names));
191         PetscCall(PetscFree(inner_domain_is));
192         PetscCall(PetscFree(outer_domain_is));
193       }
194       if (osm->n_local_true == PETSC_DECIDE) {
195         /* still no subdomains; use one subdomain per processor */
196         osm->n_local_true = 1;
197       }
198     }
199     { /* determine the global and max number of subdomains */
200       struct {
201         PetscInt max, sum;
202       } inwork, outwork;
203       PetscMPIInt size;
204 
205       inwork.max = osm->n_local_true;
206       inwork.sum = osm->n_local_true;
207       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208       osm->n_local = outwork.max;
209       osm->n       = outwork.sum;
210 
211       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212       if (outwork.max == 1 && outwork.sum == size) {
213         /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
214         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215       }
216     }
217     if (!osm->is) { /* create the index sets */
218       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219     }
220     if (osm->n_local_true > 1 && !osm->is_local) {
221       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222       for (i = 0; i < osm->n_local_true; i++) {
223         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226         } else {
227           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228           osm->is_local[i] = osm->is[i];
229         }
230       }
231     }
232     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233     if (osm->overlap > 0) {
234       /* Extend the "overlapping" regions by a number of steps */
235       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236     }
237     if (osm->sort_indices) {
238       for (i = 0; i < osm->n_local_true; i++) {
239         PetscCall(ISSort(osm->is[i]));
240         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241       }
242     }
243     flg = PETSC_FALSE;
244     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246     if (!osm->ksp) {
247       /* Create the local solvers */
248       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250       for (i = 0; i < osm->n_local_true; i++) {
251         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
253         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
254         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
255         PetscCall(KSPSetType(ksp, KSPPREONLY));
256         PetscCall(KSPGetPC(ksp, &subpc));
257         PetscCall(PCGetOptionsPrefix(pc, &prefix));
258         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
259         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
260         if (domain_dm) {
261           PetscCall(KSPSetDM(ksp, domain_dm[i]));
262           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
263           PetscCall(DMDestroy(&domain_dm[i]));
264         }
265         osm->ksp[i] = ksp;
266       }
267       if (domain_dm) PetscCall(PetscFree(domain_dm));
268     }
269 
270     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
271     PetscCall(ISSortRemoveDups(osm->lis));
272     PetscCall(ISGetLocalSize(osm->lis, &m));
273 
274     scall = MAT_INITIAL_MATRIX;
275   } else {
276     /*
277        Destroy the blocks from the previous iteration
278     */
279     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
280       PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
281       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
282       scall = MAT_INITIAL_MATRIX;
283     }
284   }
285 
286   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
287   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
288     PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
289     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
290     scall = MAT_INITIAL_MATRIX;
291   }
292 
293   /*
294      Extract out the submatrices
295   */
296   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
297   if (scall == MAT_INITIAL_MATRIX) {
298     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
299     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
300     if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
301   }
302 
303   /* Convert the types of the submatrices (if need be) */
304   if (osm->sub_mat_type) {
305     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
306   }
307 
308   if (!pc->setupcalled) {
309     VecType vtype;
310 
311     /* Create the local work vectors (from the local matrices) and scatter contexts */
312     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));
313 
314     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
315     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
316     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
317     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
318     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));
319 
320     PetscCall(ISGetLocalSize(osm->lis, &m));
321     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
322     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
323     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
324     PetscCall(VecSetSizes(osm->lx, m, m));
325     PetscCall(VecSetType(osm->lx, vtype));
326     PetscCall(VecDuplicate(osm->lx, &osm->ly));
327     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
328     PetscCall(ISDestroy(&isl));
329 
330     for (i = 0; i < osm->n_local_true; ++i) {
331       ISLocalToGlobalMapping ltog;
332       IS                     isll;
333       const PetscInt        *idx_is;
334       PetscInt              *idx_lis, nout;
335 
336       PetscCall(ISGetLocalSize(osm->is[i], &m));
337       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
338       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));
339 
340       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
341       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
342       PetscCall(ISGetLocalSize(osm->is[i], &m));
343       PetscCall(ISGetIndices(osm->is[i], &idx_is));
344       PetscCall(PetscMalloc1(m, &idx_lis));
345       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
346       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
347       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
348       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
349       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
350       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
351       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
352       PetscCall(ISDestroy(&isll));
353       PetscCall(ISDestroy(&isl));
354       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
355         ISLocalToGlobalMapping ltog;
356         IS                     isll, isll_local;
357         const PetscInt        *idx_local;
358         PetscInt              *idx1, *idx2, nout;
359 
360         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
361         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));
362 
363         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
364         PetscCall(PetscMalloc1(m_local, &idx1));
365         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
366         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
367         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
368         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));
369 
370         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
371         PetscCall(PetscMalloc1(m_local, &idx2));
372         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
373         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
374         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
375         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));
376 
377         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
378         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));
379 
380         PetscCall(ISDestroy(&isll));
381         PetscCall(ISDestroy(&isll_local));
382       }
383     }
384     PetscCall(VecDestroy(&vec));
385   }
386 
387   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
388     IS      *cis;
389     PetscInt c;
390 
391     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
392     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
393     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
394     PetscCall(PetscFree(cis));
395   }
396 
397   /* Return control to the user so that the submatrices can be modified (e.g., to apply
398      different boundary conditions for the submatrices than for the global problem) */
399   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));
400 
401   /*
402      Loop over subdomains putting them into local ksp
403   */
404   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
405   for (i = 0; i < osm->n_local_true; i++) {
406     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
407     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
408     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
409   }
410   PetscFunctionReturn(PETSC_SUCCESS);
411 }
412 
413 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
414 {
415   PC_ASM            *osm = (PC_ASM *)pc->data;
416   PetscInt           i;
417   KSPConvergedReason reason;
418 
419   PetscFunctionBegin;
420   for (i = 0; i < osm->n_local_true; i++) {
421     PetscCall(KSPSetUp(osm->ksp[i]));
422     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
423     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
424   }
425   PetscFunctionReturn(PETSC_SUCCESS);
426 }
427 
428 static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
429 {
430   PC_ASM     *osm = (PC_ASM *)pc->data;
431   PetscInt    i, n_local_true = osm->n_local_true;
432   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
433 
434   PetscFunctionBegin;
435   /*
436      support for limiting the restriction or interpolation to only local
437      subdomain values (leaving the other values 0).
438   */
439   if (!(osm->type & PC_ASM_RESTRICT)) {
440     forward = SCATTER_FORWARD_LOCAL;
441     /* have to zero the work RHS since scatter may leave some slots empty */
442     PetscCall(VecSet(osm->lx, 0.0));
443   }
444   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
445 
446   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
447   /* zero the global and the local solutions */
448   PetscCall(VecSet(y, 0.0));
449   PetscCall(VecSet(osm->ly, 0.0));
450 
451   /* copy the global RHS to local RHS including the ghost nodes */
452   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
453   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
454 
455   /* restrict local RHS to the overlapping 0-block RHS */
456   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
457   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
458 
459   /* do the local solves */
460   for (i = 0; i < n_local_true; ++i) {
461     /* solve the overlapping i-block */
462     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
463     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
464     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
465     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
466 
467     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
468       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
469       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
470     } else { /* interpolate the overlapping i-block solution to the local solution */
471       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
472       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
473     }
474 
475     if (i < n_local_true - 1) {
476       /* restrict local RHS to the overlapping (i+1)-block RHS */
477       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
478       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
479 
480       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
481         /* update the overlapping (i+1)-block RHS using the current local solution */
482         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
483         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
484       }
485     }
486   }
487   /* add the local solution to the global solution including the ghost nodes */
488   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
489   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
490   PetscFunctionReturn(PETSC_SUCCESS);
491 }
492 
493 static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
494 {
495   PC_ASM     *osm = (PC_ASM *)pc->data;
496   Mat         Z, W;
497   Vec         x;
498   PetscInt    i, m, N;
499   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
500 
501   PetscFunctionBegin;
502   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
503   /*
504      support for limiting the restriction or interpolation to only local
505      subdomain values (leaving the other values 0).
506   */
507   if (!(osm->type & PC_ASM_RESTRICT)) {
508     forward = SCATTER_FORWARD_LOCAL;
509     /* have to zero the work RHS since scatter may leave some slots empty */
510     PetscCall(VecSet(osm->lx, 0.0));
511   }
512   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
513   PetscCall(VecGetLocalSize(osm->x[0], &m));
514   PetscCall(MatGetSize(X, NULL, &N));
515   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
516 
517   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
518   /* zero the global and the local solutions */
519   PetscCall(MatZeroEntries(Y));
520   PetscCall(VecSet(osm->ly, 0.0));
521 
522   for (i = 0; i < N; ++i) {
523     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
524     /* copy the global RHS to local RHS including the ghost nodes */
525     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
526     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
527     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
528 
529     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
530     /* restrict local RHS to the overlapping 0-block RHS */
531     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
532     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
533     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
534   }
535   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
536   /* solve the overlapping 0-block */
537   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
538   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
539   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
540   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
541   PetscCall(MatDestroy(&Z));
542 
543   for (i = 0; i < N; ++i) {
544     PetscCall(VecSet(osm->ly, 0.0));
545     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
546     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
547       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
548       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
549     } else { /* interpolate the overlapping 0-block solution to the local solution */
550       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
551       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
552     }
553     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
554 
555     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
556     /* add the local solution to the global solution including the ghost nodes */
557     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
558     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
559     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
560   }
561   PetscCall(MatDestroy(&W));
562   PetscFunctionReturn(PETSC_SUCCESS);
563 }
564 
565 static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
566 {
567   PC_ASM     *osm = (PC_ASM *)pc->data;
568   PetscInt    i, n_local_true = osm->n_local_true;
569   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
570 
571   PetscFunctionBegin;
572   /*
573      Support for limiting the restriction or interpolation to only local
574      subdomain values (leaving the other values 0).
575 
576      Note: these are reversed from PCApply_ASM() because we are applying the
577      transpose of the three terms
578   */
579 
580   if (!(osm->type & PC_ASM_INTERPOLATE)) {
581     forward = SCATTER_FORWARD_LOCAL;
582     /* have to zero the work RHS since scatter may leave some slots empty */
583     PetscCall(VecSet(osm->lx, 0.0));
584   }
585   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
586 
587   /* zero the global and the local solutions */
588   PetscCall(VecSet(y, 0.0));
589   PetscCall(VecSet(osm->ly, 0.0));
590 
591   /* Copy the global RHS to local RHS including the ghost nodes */
592   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
593   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
594 
595   /* Restrict local RHS to the overlapping 0-block RHS */
596   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
597   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
598 
599   /* do the local solves */
600   for (i = 0; i < n_local_true; ++i) {
601     /* solve the overlapping i-block */
602     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
603     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
604     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
605     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
606 
607     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
608       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
609       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
610     } else { /* interpolate the overlapping i-block solution to the local solution */
611       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
612       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
613     }
614 
615     if (i < n_local_true - 1) {
616       /* Restrict local RHS to the overlapping (i+1)-block RHS */
617       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
618       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
619     }
620   }
621   /* Add the local solution to the global solution including the ghost nodes */
622   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
623   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
624   PetscFunctionReturn(PETSC_SUCCESS);
625 }
626 
627 static PetscErrorCode PCReset_ASM(PC pc)
628 {
629   PC_ASM  *osm = (PC_ASM *)pc->data;
630   PetscInt i;
631 
632   PetscFunctionBegin;
633   if (osm->ksp) {
634     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
635   }
636   if (osm->pmat) {
637     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
638   }
639   if (osm->lrestriction) {
640     PetscCall(VecScatterDestroy(&osm->restriction));
641     for (i = 0; i < osm->n_local_true; i++) {
642       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
643       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
644       PetscCall(VecDestroy(&osm->x[i]));
645       PetscCall(VecDestroy(&osm->y[i]));
646     }
647     PetscCall(PetscFree(osm->lrestriction));
648     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
649     PetscCall(PetscFree(osm->x));
650     PetscCall(PetscFree(osm->y));
651   }
652   PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
653   PetscCall(ISDestroy(&osm->lis));
654   PetscCall(VecDestroy(&osm->lx));
655   PetscCall(VecDestroy(&osm->ly));
656   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
657 
658   PetscCall(PetscFree(osm->sub_mat_type));
659 
660   osm->is       = NULL;
661   osm->is_local = NULL;
662   PetscFunctionReturn(PETSC_SUCCESS);
663 }
664 
665 static PetscErrorCode PCDestroy_ASM(PC pc)
666 {
667   PC_ASM  *osm = (PC_ASM *)pc->data;
668   PetscInt i;
669 
670   PetscFunctionBegin;
671   PetscCall(PCReset_ASM(pc));
672   if (osm->ksp) {
673     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
674     PetscCall(PetscFree(osm->ksp));
675   }
676   PetscCall(PetscFree(pc->data));
677 
678   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
679   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
680   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
681   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
682   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
683   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
684   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
686   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
687   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
688   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
689   PetscFunctionReturn(PETSC_SUCCESS);
690 }
691 
692 static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
693 {
694   PC_ASM         *osm = (PC_ASM *)pc->data;
695   PetscInt        blocks, ovl;
696   PetscBool       flg;
697   PCASMType       asmtype;
698   PCCompositeType loctype;
699   char            sub_mat_type[256];
700 
701   PetscFunctionBegin;
702   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
703   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
704   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
705   if (flg) {
706     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
707     osm->dm_subdomains = PETSC_FALSE;
708   }
709   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
710   if (flg) {
711     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
712     osm->dm_subdomains = PETSC_FALSE;
713   }
714   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
715   if (flg) {
716     PetscCall(PCASMSetOverlap(pc, ovl));
717     osm->dm_subdomains = PETSC_FALSE;
718   }
719   flg = PETSC_FALSE;
720   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
721   if (flg) PetscCall(PCASMSetType(pc, asmtype));
722   flg = PETSC_FALSE;
723   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
724   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
725   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
726   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
727   PetscOptionsHeadEnd();
728   PetscFunctionReturn(PETSC_SUCCESS);
729 }
730 
731 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
732 {
733   PC_ASM  *osm = (PC_ASM *)pc->data;
734   PetscInt i;
735 
736   PetscFunctionBegin;
737   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
738   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
739 
740   if (!pc->setupcalled) {
741     if (is) {
742       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
743     }
744     if (is_local) {
745       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
746     }
747     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
748 
749     if (osm->ksp && osm->n_local_true != n) {
750       for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
751       PetscCall(PetscFree(osm->ksp));
752     }
753 
754     osm->n_local_true = n;
755     osm->is           = NULL;
756     osm->is_local     = NULL;
757     if (is) {
758       PetscCall(PetscMalloc1(n, &osm->is));
759       for (i = 0; i < n; i++) osm->is[i] = is[i];
760       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
761       osm->overlap = -1;
762     }
763     if (is_local) {
764       PetscCall(PetscMalloc1(n, &osm->is_local));
765       for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
766       if (!is) {
767         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
768         for (i = 0; i < osm->n_local_true; i++) {
769           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
770             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
771             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
772           } else {
773             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
774             osm->is[i] = osm->is_local[i];
775           }
776         }
777       }
778     }
779   }
780   PetscFunctionReturn(PETSC_SUCCESS);
781 }
782 
783 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
784 {
785   PC_ASM     *osm = (PC_ASM *)pc->data;
786   PetscMPIInt rank, size;
787   PetscInt    n;
788 
789   PetscFunctionBegin;
790   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
791   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets, they cannot be set globally yet.");
792 
793   /*
794      Split the subdomains equally among all processors
795   */
796   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
797   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
798   n = N / size + ((N % size) > rank);
799   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
800   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
801   if (!pc->setupcalled) {
802     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
803 
804     osm->n_local_true = n;
805     osm->is           = NULL;
806     osm->is_local     = NULL;
807   }
808   PetscFunctionReturn(PETSC_SUCCESS);
809 }
810 
811 static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
812 {
813   PC_ASM *osm = (PC_ASM *)pc->data;
814 
815   PetscFunctionBegin;
816   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
817   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
818   if (!pc->setupcalled) osm->overlap = ovl;
819   PetscFunctionReturn(PETSC_SUCCESS);
820 }
821 
822 static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
823 {
824   PC_ASM *osm = (PC_ASM *)pc->data;
825 
826   PetscFunctionBegin;
827   osm->type     = type;
828   osm->type_set = PETSC_TRUE;
829   PetscFunctionReturn(PETSC_SUCCESS);
830 }
831 
832 static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
833 {
834   PC_ASM *osm = (PC_ASM *)pc->data;
835 
836   PetscFunctionBegin;
837   *type = osm->type;
838   PetscFunctionReturn(PETSC_SUCCESS);
839 }
840 
841 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
842 {
843   PC_ASM *osm = (PC_ASM *)pc->data;
844 
845   PetscFunctionBegin;
846   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
847   osm->loctype = type;
848   PetscFunctionReturn(PETSC_SUCCESS);
849 }
850 
851 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
852 {
853   PC_ASM *osm = (PC_ASM *)pc->data;
854 
855   PetscFunctionBegin;
856   *type = osm->loctype;
857   PetscFunctionReturn(PETSC_SUCCESS);
858 }
859 
860 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
861 {
862   PC_ASM *osm = (PC_ASM *)pc->data;
863 
864   PetscFunctionBegin;
865   osm->sort_indices = doSort;
866   PetscFunctionReturn(PETSC_SUCCESS);
867 }
868 
869 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
870 {
871   PC_ASM *osm = (PC_ASM *)pc->data;
872 
873   PetscFunctionBegin;
874   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
875 
876   if (n_local) *n_local = osm->n_local_true;
877   if (first_local) {
878     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
879     *first_local -= osm->n_local_true;
880   }
881   if (ksp) *ksp = osm->ksp;
882   PetscFunctionReturn(PETSC_SUCCESS);
883 }
884 
885 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
886 {
887   PC_ASM *osm = (PC_ASM *)pc->data;
888 
889   PetscFunctionBegin;
890   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
891   PetscAssertPointer(sub_mat_type, 2);
892   *sub_mat_type = osm->sub_mat_type;
893   PetscFunctionReturn(PETSC_SUCCESS);
894 }
895 
896 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
897 {
898   PC_ASM *osm = (PC_ASM *)pc->data;
899 
900   PetscFunctionBegin;
901   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
902   PetscCall(PetscFree(osm->sub_mat_type));
903   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
904   PetscFunctionReturn(PETSC_SUCCESS);
905 }
906 
907 /*@C
908   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.
909 
910   Collective
911 
912   Input Parameters:
913 + pc       - the preconditioner context
914 . n        - the number of subdomains for this processor (default value = 1)
915 . is       - the index sets that define the subdomains for this processor
916          (or `NULL` for PETSc to determine subdomains)
917 - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
918          (or `NULL` to not provide these)
919 
920   Options Database Key:
921 . -pc_asm_local_blocks <blks> - Sets number of local blocks
922 
923   Level: advanced
924 
925   Notes:
926   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
927 
928   By default the `PCASM` preconditioner uses 1 block per processor.
929 
930   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.
931 
932   If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
933   back to form the global solution (this is the standard restricted additive Schwarz method)
934 
935   If is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
936   no code to handle that case.
937 
938 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
939           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
940 @*/
941 PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
942 {
943   PetscFunctionBegin;
944   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
945   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
946   PetscFunctionReturn(PETSC_SUCCESS);
947 }
948 
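/*
   A minimal usage sketch for PCASMSetLocalSubdomains() (illustrative only; the two-block
   split, the block size of 50, and the variable rstart, e.g. obtained from
   MatGetOwnershipRange(), are assumptions rather than values taken from this file):

     IS is[2];
     PetscCall(ISCreateStride(PETSC_COMM_SELF, 50, rstart, 1, &is[0]));      // global rows rstart .. rstart+49
     PetscCall(ISCreateStride(PETSC_COMM_SELF, 50, rstart + 50, 1, &is[1])); // global rows rstart+50 .. rstart+99
     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetLocalSubdomains(pc, 2, is, NULL));                    // must be called before PCSetUp()
     PetscCall(ISDestroy(&is[0]));                                           // the PC keeps its own references
     PetscCall(ISDestroy(&is[1]));
*/
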
949 /*@C
950   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
951   additive Schwarz preconditioner, `PCASM`.
952 
953   Collective, all MPI ranks must pass in the same array of `IS`
954 
955   Input Parameters:
956 + pc       - the preconditioner context
957 . N        - the number of subdomains for all processors
958 . is       - the index sets that define the subdomains for all processors
959          (or `NULL` to ask PETSc to determine the subdomains)
960 - is_local - the index sets that define the local part of the subdomains for this processor
961          (or `NULL` to not provide this information)
962 
963   Options Database Key:
964 . -pc_asm_blocks <blks> - Sets total blocks
965 
966   Level: advanced
967 
968   Notes:
969   Currently you cannot use this routine to set the actual subdomains; the is and is_local arguments must be `NULL`.
970 
971   By default the `PCASM` preconditioner uses 1 block per processor.
972 
973   These index sets cannot be destroyed until after completion of the
974   linear solves for which the `PCASM` preconditioner is being used.
975 
976   Use `PCASMSetLocalSubdomains()` to set local subdomains.
977 
978   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
979 
980 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
981           `PCASMCreateSubdomains2D()`, `PCGASM`
982 @*/
983 PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
984 {
985   PetscFunctionBegin;
986   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
987   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
988   PetscFunctionReturn(PETSC_SUCCESS);
989 }
990 
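/*
   A minimal usage sketch for PCASMSetTotalSubdomains() (illustrative; the total of 16
   blocks is an arbitrary assumption). The index set arguments must currently be NULL:

     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetTotalSubdomains(pc, 16, NULL, NULL));   // split 16 blocks across all processes
     // or, from the options database: -pc_asm_blocks 16
*/
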
991 /*@
992   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
993   additive Schwarz preconditioner, `PCASM`.
994 
995   Logically Collective
996 
997   Input Parameters:
998 + pc  - the preconditioner context
999 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1000 
1001   Options Database Key:
1002 . -pc_asm_overlap <ovl> - Sets overlap
1003 
1004   Level: intermediate
1005 
1006   Notes:
1007   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1008   multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1009   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).
1010 
1011   The overlap defaults to 1, so if one desires that no additional
1012   overlap be computed beyond what may have been set with a call to
1013   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1014   must be set to 0.  In particular, if one does not explicitly set
1015   the subdomains in the application code, then all overlap would be computed
1016   internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1017   variant that is equivalent to the block Jacobi preconditioner.
1018 
1019   The default algorithm used by PETSc to increase overlap is fast, but not scalable;
1020   use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1021 
1022   One can define initial index sets with any overlap via
1023   `PCASMSetLocalSubdomains()`; the routine
1024   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1025   if desired.
1026 
1027 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1028           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1029 @*/
1030 PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1031 {
1032   PetscFunctionBegin;
1033   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1034   PetscValidLogicalCollectiveInt(pc, ovl, 2);
1035   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1036   PetscFunctionReturn(PETSC_SUCCESS);
1037 }
1038 
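/*
   A minimal usage sketch for PCASMSetOverlap() (illustrative):

     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetOverlap(pc, 2));   // extend each subdomain by 2 layers of matrix-graph overlap
     // or, from the options database: -pc_asm_overlap 2
*/
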
1039 /*@
1040   PCASMSetType - Sets the type of restriction and interpolation used
1041   for local problems in the additive Schwarz method, `PCASM`.
1042 
1043   Logically Collective
1044 
1045   Input Parameters:
1046 + pc   - the preconditioner context
1047 - type - variant of `PCASM`, one of
1048 .vb
1049       PC_ASM_BASIC       - full interpolation and restriction
1050       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1051       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1052       PC_ASM_NONE        - local processor restriction and interpolation
1053 .ve
1054 
1055   Options Database Key:
1056 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`
1057 
1058   Level: intermediate
1059 
1060   Note:
1061   If the is_local arguments are passed to `PCASMSetLocalSubdomains()`, then they are used when `PC_ASM_RESTRICT` has been selected
1062   to limit the local processor interpolation
1063 
1064 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1065           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1066 @*/
1067 PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1068 {
1069   PetscFunctionBegin;
1070   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1071   PetscValidLogicalCollectiveEnum(pc, type, 2);
1072   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1073   PetscFunctionReturn(PETSC_SUCCESS);
1074 }
1075 
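/*
   A minimal usage sketch for PCASMSetType() (illustrative):

     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetType(pc, PC_ASM_BASIC));   // full restriction and interpolation instead of the default restricted variant
     // or, from the options database: -pc_asm_type basic
*/
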
1076 /*@
1077   PCASMGetType - Gets the type of restriction and interpolation used
1078   for local problems in the additive Schwarz method, `PCASM`.
1079 
1080   Logically Collective
1081 
1082   Input Parameter:
1083 . pc - the preconditioner context
1084 
1085   Output Parameter:
1086 . type - variant of `PCASM`, one of
1087 .vb
1088       PC_ASM_BASIC       - full interpolation and restriction
1089       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1090       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1091       PC_ASM_NONE        - local processor restriction and interpolation
1092 .ve
1093 
1094   Options Database Key:
1095 . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type
1096 
1097   Level: intermediate
1098 
1099 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1100           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1101 @*/
1102 PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1103 {
1104   PetscFunctionBegin;
1105   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1106   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1107   PetscFunctionReturn(PETSC_SUCCESS);
1108 }
1109 
1110 /*@
1111   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1112 
1113   Logically Collective
1114 
1115   Input Parameters:
1116 + pc   - the preconditioner context
1117 - type - type of composition, one of
1118 .vb
1119   PC_COMPOSITE_ADDITIVE       - local additive combination
1120   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1121 .ve
1122 
1123   Options Database Key:
1124 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1125 
1126   Level: intermediate
1127 
1128 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1129 @*/
1130 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1131 {
1132   PetscFunctionBegin;
1133   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1134   PetscValidLogicalCollectiveEnum(pc, type, 2);
1135   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1136   PetscFunctionReturn(PETSC_SUCCESS);
1137 }
1138 
1139 /*@
1140   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.
1141 
1142   Logically Collective
1143 
1144   Input Parameter:
1145 . pc - the preconditioner context
1146 
1147   Output Parameter:
1148 . type - type of composition, one of
1149 .vb
1150   PC_COMPOSITE_ADDITIVE       - local additive combination
1151   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1152 .ve
1153 
1154   Options Database Key:
1155 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1156 
1157   Level: intermediate
1158 
1159 .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1160 @*/
1161 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1162 {
1163   PetscFunctionBegin;
1164   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1165   PetscAssertPointer(type, 2);
1166   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1167   PetscFunctionReturn(PETSC_SUCCESS);
1168 }
1169 
1170 /*@
1171   PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1172 
1173   Logically Collective
1174 
1175   Input Parameters:
1176 + pc     - the preconditioner context
1177 - doSort - sort the subdomain indices
1178 
1179   Level: intermediate
1180 
1181 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1182           `PCASMCreateSubdomains2D()`
1183 @*/
1184 PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1185 {
1186   PetscFunctionBegin;
1187   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1188   PetscValidLogicalCollectiveBool(pc, doSort, 2);
1189   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1190   PetscFunctionReturn(PETSC_SUCCESS);
1191 }
1192 
1193 /*@C
1194   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1195   this processor.
1196 
1197   Collective iff `first_local` is requested
1198 
1199   Input Parameter:
1200 . pc - the preconditioner context
1201 
1202   Output Parameters:
1203 + n_local     - the number of blocks on this processor, or `NULL` if not needed
1204 . first_local - the global number of the first block on this processor, or `NULL` if not needed;
1205                  either all processors must request this value or all must pass `NULL`
1206 - ksp         - the array of `KSP` contexts
1207 
1208   Level: advanced
1209 
1210   Notes:
1211   The array of `KSP` contexts returned by `PCASMGetSubKSP()` must not be freed by the caller.
1212 
1213   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.
1214 
1215   Fortran Notes:
1216   The output argument `ksp` must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
1217 
1218 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1219           `PCASMCreateSubdomains2D()`
1220 @*/
1221 PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1222 {
1223   PetscFunctionBegin;
1224   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1225   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1226   PetscFunctionReturn(PETSC_SUCCESS);
1227 }
1228 
1229 /*MC
1230    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1231            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`
1232 
1233    Options Database Keys:
1234 +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1235 .  -pc_asm_overlap <ovl>                          - Sets overlap
1236 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1237 -  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`
1238 
1239    Level: beginner
1240 
1241    Notes:
1242    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1243    will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1244    -pc_asm_type basic to get the same convergence behavior in both cases.
1245 
1246    Each processor can have one or more blocks, but a block cannot be shared by more
1247    than one processor. Use `PCGASM` for subdomains shared by multiple processes.
1248 
1249    To set options on the solvers for each block, append -sub_ to all the `KSP` and `PC`
1250    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1251 
1252    To set the options on the solvers separately for each block, call `PCASMGetSubKSP()`
1253    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`).
1254 
1255 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1256           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1257           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`
1258 M*/
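/*
   An illustrative command line (the program name ./app is assumed) combining the options
   described above: restricted additive Schwarz with two layers of overlap and ILU(1)
   block solves.

     mpiexec -n 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 \
       -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
*/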
1259 
1260 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1261 {
1262   PC_ASM *osm;
1263 
1264   PetscFunctionBegin;
1265   PetscCall(PetscNew(&osm));
1266 
1267   osm->n             = PETSC_DECIDE;
1268   osm->n_local       = 0;
1269   osm->n_local_true  = PETSC_DECIDE;
1270   osm->overlap       = 1;
1271   osm->ksp           = NULL;
1272   osm->restriction   = NULL;
1273   osm->lprolongation = NULL;
1274   osm->lrestriction  = NULL;
1275   osm->x             = NULL;
1276   osm->y             = NULL;
1277   osm->is            = NULL;
1278   osm->is_local      = NULL;
1279   osm->mat           = NULL;
1280   osm->pmat          = NULL;
1281   osm->type          = PC_ASM_RESTRICT;
1282   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1283   osm->sort_indices  = PETSC_TRUE;
1284   osm->dm_subdomains = PETSC_FALSE;
1285   osm->sub_mat_type  = NULL;
1286 
1287   pc->data                 = (void *)osm;
1288   pc->ops->apply           = PCApply_ASM;
1289   pc->ops->matapply        = PCMatApply_ASM;
1290   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1291   pc->ops->setup           = PCSetUp_ASM;
1292   pc->ops->reset           = PCReset_ASM;
1293   pc->ops->destroy         = PCDestroy_ASM;
1294   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1295   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1296   pc->ops->view            = PCView_ASM;
1297   pc->ops->applyrichardson = NULL;
1298 
1299   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1300   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1301   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1302   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1303   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1304   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1305   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1306   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1307   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1308   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1309   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1310   PetscFunctionReturn(PETSC_SUCCESS);
1311 }
1312 
1313 /*@C
1314   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1315   preconditioner, `PCASM`,  for any problem on a general grid.
1316 
1317   Collective
1318 
1319   Input Parameters:
1320 + A - The global matrix operator
1321 - n - the number of local blocks
1322 
1323   Output Parameter:
1324 . outis - the array of index sets defining the subdomains
1325 
1326   Level: advanced
1327 
1328   Note:
1329   This generates nonoverlapping subdomains; `PCASM` will generate the overlap
1330   from these if you use `PCASMSetLocalSubdomains()`.
1331 
1332   Fortran Notes:
1333   You must provide the array `outis`, already allocated with length `n`.
1334 
1335 .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1336 @*/
1337 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1338 {
1339   MatPartitioning mpart;
1340   const char     *prefix;
1341   PetscInt        i, j, rstart, rend, bs;
1342   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1343   Mat             Ad = NULL, adj;
1344   IS              ispart, isnumb, *is;
1345 
1346   PetscFunctionBegin;
1347   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
1348   PetscAssertPointer(outis, 3);
1349   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);
1350 
1351   /* Get prefix, row distribution, and block size */
1352   PetscCall(MatGetOptionsPrefix(A, &prefix));
1353   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1354   PetscCall(MatGetBlockSize(A, &bs));
1355   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1356 
1357   /* Get diagonal block from matrix if possible */
1358   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1359   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1360   if (Ad) {
1361     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1362     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1363   }
1364   if (Ad && n > 1) {
1365     PetscBool match, done;
1366     /* Try to setup a good matrix partitioning if available */
1367     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1368     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1369     PetscCall(MatPartitioningSetFromOptions(mpart));
1370     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1371     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1372     if (!match) { /* assume a "good" partitioner is available */
1373       PetscInt        na;
1374       const PetscInt *ia, *ja;
1375       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1376       if (done) {
1377         /* Build adjacency matrix by hand. Unfortunately a call to
1378            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1379            remove the block-aij structure and we cannot expect
1380            MatPartitioning to split vertices as we need */
1381         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1382         const PetscInt *row;
1383         nnz = 0;
1384         for (i = 0; i < na; i++) { /* count number of nonzeros */
1385           len = ia[i + 1] - ia[i];
1386           row = ja + ia[i];
1387           for (j = 0; j < len; j++) {
1388             if (row[j] == i) { /* don't count diagonal */
1389               len--;
1390               break;
1391             }
1392           }
1393           nnz += len;
1394         }
1395         PetscCall(PetscMalloc1(na + 1, &iia));
1396         PetscCall(PetscMalloc1(nnz, &jja));
1397         nnz    = 0;
1398         iia[0] = 0;
1399         for (i = 0; i < na; i++) { /* fill adjacency */
1400           cnt = 0;
1401           len = ia[i + 1] - ia[i];
1402           row = ja + ia[i];
1403           for (j = 0; j < len; j++) {
1404             if (row[j] != i) { /* if not diagonal */
1405               jja[nnz + cnt++] = row[j];
1406             }
1407           }
1408           nnz += cnt;
1409           iia[i + 1] = nnz;
1410         }
1411         /* Partitioning of the adjacency matrix */
1412         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1413         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1414         PetscCall(MatPartitioningSetNParts(mpart, n));
1415         PetscCall(MatPartitioningApply(mpart, &ispart));
1416         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1417         PetscCall(MatDestroy(&adj));
1418         foundpart = PETSC_TRUE;
1419       }
1420       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1421     }
1422     PetscCall(MatPartitioningDestroy(&mpart));
1423   }
1424 
1425   PetscCall(PetscMalloc1(n, &is));
1426   *outis = is;
1427 
1428   if (!foundpart) {
1429     /* Partitioning by contiguous chunks of rows */
1430 
1431     PetscInt mbs   = (rend - rstart) / bs;
1432     PetscInt start = rstart;
1433     for (i = 0; i < n; i++) {
1434       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1435       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1436       start += count;
1437     }
1438 
1439   } else {
1440     /* Partitioning by adjacency of diagonal block  */
1441 
1442     const PetscInt *numbering;
1443     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1444     /* Get node count in each partition */
1445     PetscCall(PetscMalloc1(n, &count));
1446     PetscCall(ISPartitioningCount(ispart, n, count));
1447     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1448       for (i = 0; i < n; i++) count[i] *= bs;
1449     }
1450     /* Build indices from node numbering */
1451     PetscCall(ISGetLocalSize(isnumb, &nidx));
1452     PetscCall(PetscMalloc1(nidx, &indices));
1453     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1454     PetscCall(ISGetIndices(isnumb, &numbering));
1455     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1456     PetscCall(ISRestoreIndices(isnumb, &numbering));
1457     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1458       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1459       for (i = 0; i < nidx; i++) {
1460         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1461       }
1462       PetscCall(PetscFree(indices));
1463       nidx *= bs;
1464       indices = newidx;
1465     }
1466     /* Shift to get global indices */
1467     for (i = 0; i < nidx; i++) indices[i] += rstart;
1468 
1469     /* Build the index sets for each block */
1470     for (i = 0; i < n; i++) {
1471       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1472       PetscCall(ISSort(is[i]));
1473       start += count[i];
1474     }
1475 
1476     PetscCall(PetscFree(count));
1477     PetscCall(PetscFree(indices));
1478     PetscCall(ISDestroy(&isnumb));
1479     PetscCall(ISDestroy(&ispart));
1480   }
1481   PetscFunctionReturn(PETSC_SUCCESS);
1482 }
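/*
   A usage sketch (assumed, not part of the original source): creating 4 nonoverlapping
   local blocks from the operator, handing them to the preconditioner, and requesting the
   overlap separately. The preconditioner keeps its own references to the index sets, so
   the caller's copies can be released once they have been set. The names `A`, `pc`, and
   `is` are illustrative.

     IS *is;
     PetscCall(PCASMCreateSubdomains(A, 4, &is));
     PetscCall(PCASMSetLocalSubdomains(pc, 4, is, NULL));
     PetscCall(PCASMSetOverlap(pc, 1));
     PetscCall(PCASMDestroySubdomains(4, is, NULL));
*/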
1483 
1484 /*@C
1485   PCASMDestroySubdomains - Destroys the index sets created with
1486   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.
1487 
1488   Collective
1489 
1490   Input Parameters:
1491 + n        - the number of index sets
1492 . is       - the array of index sets
1493 - is_local - the array of local index sets, can be `NULL`
1494 
1495   Level: advanced
1496 
1497 .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1498 @*/
1499 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1500 {
1501   PetscInt i;
1502 
1503   PetscFunctionBegin;
1504   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1505   if (is) {
1506     PetscAssertPointer(is, 2);
1507     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1508     PetscCall(PetscFree(is));
1509   }
1510   if (is_local) {
1511     PetscAssertPointer(is_local, 3);
1512     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1513     PetscCall(PetscFree(is_local));
1514   }
1515   PetscFunctionReturn(PETSC_SUCCESS);
1516 }
1517 
1518 /*@C
1519   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1520   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.
1521 
1522   Not Collective
1523 
1524   Input Parameters:
1525 + m       - the number of mesh points in the x direction
1526 . n       - the number of mesh points in the y direction
1527 . M       - the number of subdomains in the x direction
1528 . N       - the number of subdomains in the y direction
1529 . dof     - degrees of freedom per node
1530 - overlap - overlap in mesh lines
1531 
1532   Output Parameters:
1533 + Nsub     - the number of subdomains created
1534 . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1535 - is_local - array of index sets defining non-overlapping subdomains
1536 
1537   Level: advanced
1538 
1539   Note:
1540   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1541   preconditioners.  More general related routines are
1542   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.
1543 
1544   Fortran Notes:
1545   The `is` and `is_local` arguments must be declared as arrays long enough to hold `Nsub` entries.
1546 
1547 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1548           `PCASMSetOverlap()`
1549 @*/
1550 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
1551 {
1552   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1553   PetscInt nidx, *idx, loc, ii, jj, count;
1554 
1555   PetscFunctionBegin;
1556   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");
1557 
1558   *Nsub = N * M;
1559   PetscCall(PetscMalloc1(*Nsub, is));
1560   PetscCall(PetscMalloc1(*Nsub, is_local));
1561   ystart    = 0;
1562   loc_outer = 0;
1563   for (i = 0; i < N; i++) {
1564     height = n / N + ((n % N) > i); /* height of subdomain */
1565     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1566     yleft = ystart - overlap;
1567     if (yleft < 0) yleft = 0;
1568     yright = ystart + height + overlap;
1569     if (yright > n) yright = n;
1570     xstart = 0;
1571     for (j = 0; j < M; j++) {
1572       width = m / M + ((m % M) > j); /* width of subdomain */
1573       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1574       xleft = xstart - overlap;
1575       if (xleft < 0) xleft = 0;
1576       xright = xstart + width + overlap;
1577       if (xright > m) xright = m;
1578       nidx = (xright - xleft) * (yright - yleft);
1579       PetscCall(PetscMalloc1(nidx, &idx));
1580       loc = 0;
1581       for (ii = yleft; ii < yright; ii++) {
1582         count = m * ii + xleft;
1583         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1584       }
1585       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1586       if (overlap == 0) {
1587         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));
1588 
1589         (*is_local)[loc_outer] = (*is)[loc_outer];
1590       } else {
1591         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1592           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1593         }
1594         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1595       }
1596       PetscCall(PetscFree(idx));
1597       xstart += width;
1598       loc_outer++;
1599     }
1600     ystart += height;
1601   }
1602   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1603   PetscFunctionReturn(PETSC_SUCCESS);
1604 }
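/*
   A usage sketch (assumed, not part of the original source, sequential only): decomposing a
   16x16 structured grid with one degree of freedom per node into a 2x2 array of subdomains
   with one mesh line of overlap, then passing the index sets to the preconditioner. The
   name `pc` is illustrative.

     IS      *is, *is_local;
     PetscInt Nsub;

     PetscCall(PCASMCreateSubdomains2D(16, 16, 2, 2, 1, 1, &Nsub, &is, &is_local));
     PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
*/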
1605 
1606 /*@C
1607   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1608   only) for the additive Schwarz preconditioner, `PCASM`.
1609 
1610   Not Collective
1611 
1612   Input Parameter:
1613 . pc - the preconditioner context
1614 
1615   Output Parameters:
1616 + n        - if requested, the number of subdomains for this processor (default value = 1)
1617 . is       - if requested, the index sets that define the subdomains for this processor
1618 - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)
1619 
1620   Level: advanced
1621 
1622   Note:
1623   The `IS` numbering is in the parallel, global numbering of the vector.
1624 
1625 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1626           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1627 @*/
1628 PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1629 {
1630   PC_ASM   *osm = (PC_ASM *)pc->data;
1631   PetscBool match;
1632 
1633   PetscFunctionBegin;
1634   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1635   if (n) PetscAssertPointer(n, 2);
1636   if (is) PetscAssertPointer(is, 3);
1637   if (is_local) PetscAssertPointer(is_local, 4);
1638   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1639   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1640   if (n) *n = osm->n_local_true;
1641   if (is) *is = osm->is;
1642   if (is_local) *is_local = osm->is_local;
1643   PetscFunctionReturn(PETSC_SUCCESS);
1644 }
1645 
1646 /*@C
1647   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1648   only) for the additive Schwarz preconditioner, `PCASM`.
1649 
1650   Not Collective
1651 
1652   Input Parameter:
1653 . pc - the preconditioner context
1654 
1655   Output Parameters:
1656 + n   - if requested, the number of matrices for this processor (default value = 1)
1657 - mat - if requested, the matrices
1658 
1659   Level: advanced
1660 
1661   Notes:
1662   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`.
1663 
1664   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
1665 
1666 .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1667           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1668 @*/
1669 PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1670 {
1671   PC_ASM   *osm;
1672   PetscBool match;
1673 
1674   PetscFunctionBegin;
1675   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1676   if (n) PetscAssertPointer(n, 2);
1677   if (mat) PetscAssertPointer(mat, 3);
1678   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1679   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1680   if (!match) {
1681     if (n) *n = 0;
1682     if (mat) *mat = NULL;
1683   } else {
1684     osm = (PC_ASM *)pc->data;
1685     if (n) *n = osm->n_local_true;
1686     if (mat) *mat = osm->pmat;
1687   }
1688   PetscFunctionReturn(PETSC_SUCCESS);
1689 }
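/*
   A usage sketch (assumed, not part of the original source): inspecting the local
   submatrices once the preconditioner has been set up but before it is applied. The names
   `ksp`, `pc`, `nmat`, and `mats` are illustrative.

     Mat     *mats;
     PetscInt i, nmat;

     PetscCall(KSPSetUp(ksp));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCASMGetLocalSubmatrices(pc, &nmat, &mats));
     for (i = 0; i < nmat; i++) PetscCall(MatView(mats[i], PETSC_VIEWER_STDOUT_SELF));
*/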
1690 
1691 /*@
1692   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1693 
1694   Logically Collective
1695 
1696   Input Parameters:
1697 + pc  - the preconditioner
1698 - flg - boolean indicating whether to use subdomains defined by the `DM`
1699 
1700   Options Database Key:
1701 . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`
1702 
1703   Level: intermediate
1704 
1705   Note:
1706   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1707   so setting either of the first two effectively turns the latter off.
1708 
1709 .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1710           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1711 @*/
1712 PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1713 {
1714   PC_ASM   *osm = (PC_ASM *)pc->data;
1715   PetscBool match;
1716 
1717   PetscFunctionBegin;
1718   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1719   PetscValidLogicalCollectiveBool(pc, flg, 2);
1720   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1721   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1722   if (match) osm->dm_subdomains = flg;
1723   PetscFunctionReturn(PETSC_SUCCESS);
1724 }
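/*
   A usage sketch (assumed, not part of the original source): letting a DM attached to the
   KSP define the subdomains, provided the DM supports DMCreateDomainDecomposition(). This
   must be done before the PC is set up and is equivalent to -pc_asm_dm_subdomains. The
   names `ksp`, `dm`, and `pc` are illustrative.

     PetscCall(KSPSetDM(ksp, dm));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE));
*/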
1725 
1726 /*@
1727   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.
1728 
1729   Not Collective
1730 
1731   Input Parameter:
1732 . pc - the preconditioner
1733 
1734   Output Parameter:
1735 . flg - boolean indicating whether to use subdomains defined by the `DM`
1736 
1737   Level: intermediate
1738 
1739 .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1740           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1741 @*/
1742 PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1743 {
1744   PC_ASM   *osm = (PC_ASM *)pc->data;
1745   PetscBool match;
1746 
1747   PetscFunctionBegin;
1748   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1749   PetscAssertPointer(flg, 2);
1750   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1751   if (match) *flg = osm->dm_subdomains;
1752   else *flg = PETSC_FALSE;
1753   PetscFunctionReturn(PETSC_SUCCESS);
1754 }
1755 
1756 /*@C
1757   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
1758 
1759   Not Collective
1760 
1761   Input Parameter:
1762 . pc - the `PC`
1763 
1764   Output Parameter:
1765 . sub_mat_type - name of matrix type
1766 
1767   Level: advanced
1768 
1769 .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1770 @*/
1771 PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1772 {
1773   PetscFunctionBegin;
1774   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1775   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1776   PetscFunctionReturn(PETSC_SUCCESS);
1777 }
1778 
1779 /*@C
1780   PCASMSetSubMatType - Sets the type of matrix used for `PCASM` subsolves
1781 
1782   Collective
1783 
1784   Input Parameters:
1785 + pc           - the `PC` object
1786 - sub_mat_type - the `MatType`
1787 
1788   Options Database Key:
1789 . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1790    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1791 
1792   Note:
1793   See `MatType` for available types
1794 
1795   Level: advanced
1796 
1797 .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1798 @*/
1799 PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1800 {
1801   PetscFunctionBegin;
1802   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1803   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1804   PetscFunctionReturn(PETSC_SUCCESS);
1805 }
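/*
   A usage sketch (assumed, not part of the original source): requesting that the sequential
   submatrices used for the block solves be of ViennaCL type, which requires a ViennaCL-enabled
   PETSc build; equivalent to -pc_asm_sub_mat_type seqaijviennacl. The name `pc` is illustrative.

     PetscCall(PCASMSetSubMatType(pc, MATSEQAIJVIENNACL));
*/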
1806