xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 26cc229b24afffee8e9a203ddc8754b40d660fc7)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
/*
  PCView_ASM - Views the additive Schwarz preconditioner.

  Supports ASCII viewers (a summary by default; per-block solver detail under
  the PETSC_VIEWER_ASCII_INFO_DETAIL format) and string viewers.

  Note: the PetscViewerGetSubViewer()/PetscViewerRestoreSubViewer() pairs are
  collective on the viewer's communicator, so every rank must make those calls
  even when only rank 0 actually prints.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM            *osm = (PC_ASM*)pc->data;
  PetscMPIInt       rank;
  PetscInt          i,bsz;
  PetscBool         iascii,isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char        *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  if (iascii) {
    /* fallback strings used when the overlap/subdomain counts are not yet determined */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %" PetscInt_FMT,osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %" PetscInt_FMT,osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
    PetscCall(PetscViewerGetFormat(viewer,&format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* summary view: show only the first block's solver, on rank 0 */
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n",prefix?prefix:""));
        /* collective: all ranks obtain/restore the sub-viewer, only rank 0 prints with it */
        PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(viewer));
          PetscCall(KSPView(osm->ksp[0],sviewer));
          PetscCall(PetscViewerASCIIPopTab(viewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      }
    } else {
      /* detailed view: every rank reports each of its local blocks */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %" PetscInt_FMT "\n",(int)rank,osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i],&bsz));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n",(int)rank,i,bsz));
        PetscCall(KSPView(osm->ksp[i],sviewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    /* one-line summary plus (collectively) the first block's KSP */
    PetscCall(PetscViewerStringSPrintf(viewer," blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0],sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  }
  PetscFunctionReturn(0);
}
79 
80 static PetscErrorCode PCASMPrintSubdomains(PC pc)
81 {
82   PC_ASM         *osm = (PC_ASM*)pc->data;
83   const char     *prefix;
84   char           fname[PETSC_MAX_PATH_LEN+1];
85   PetscViewer    viewer, sviewer;
86   char           *s;
87   PetscInt       i,j,nidx;
88   const PetscInt *idx;
89   PetscMPIInt    rank, size;
90 
91   PetscFunctionBegin;
92   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
93   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
94   PetscCall(PCGetOptionsPrefix(pc,&prefix));
95   PetscCall(PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,sizeof(fname),NULL));
96   if (fname[0] == 0) PetscCall(PetscStrcpy(fname,"stdout"));
97   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer));
98   for (i=0; i<osm->n_local; i++) {
99     if (i < osm->n_local_true) {
100       PetscCall(ISGetLocalSize(osm->is[i],&nidx));
101       PetscCall(ISGetIndices(osm->is[i],&idx));
102       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
103 #define len  16*(nidx+1)+512
104       PetscCall(PetscMalloc1(len,&s));
105       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
106 #undef len
107       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
108       for (j=0; j<nidx; j++) {
109         PetscCall(PetscViewerStringSPrintf(sviewer,"%" PetscInt_FMT " ",idx[j]));
110       }
111       PetscCall(ISRestoreIndices(osm->is[i],&idx));
112       PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
113       PetscCall(PetscViewerDestroy(&sviewer));
114       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
115       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
116       PetscCall(PetscViewerFlush(viewer));
117       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
118       PetscCall(PetscFree(s));
119       if (osm->is_local) {
120         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
121 #define len  16*(nidx+1)+512
122         PetscCall(PetscMalloc1(len, &s));
123         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
124 #undef len
125         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
126         PetscCall(ISGetLocalSize(osm->is_local[i],&nidx));
127         PetscCall(ISGetIndices(osm->is_local[i],&idx));
128         for (j=0; j<nidx; j++) {
129           PetscCall(PetscViewerStringSPrintf(sviewer,"%" PetscInt_FMT " ",idx[j]));
130         }
131         PetscCall(ISRestoreIndices(osm->is_local[i],&idx));
132         PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
133         PetscCall(PetscViewerDestroy(&sviewer));
134         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
135         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
136         PetscCall(PetscViewerFlush(viewer));
137         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
138         PetscCall(PetscFree(s));
139       }
140     } else {
141       /* Participate in collective viewer calls. */
142       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143       PetscCall(PetscViewerFlush(viewer));
144       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       /* Assume either all ranks have is_local or none do. */
146       if (osm->is_local) {
147         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
148         PetscCall(PetscViewerFlush(viewer));
149         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
150       }
151     }
152   }
153   PetscCall(PetscViewerFlush(viewer));
154   PetscCall(PetscViewerDestroy(&viewer));
155   PetscFunctionReturn(0);
156 }
157 
/*
  PCSetUp_ASM - Sets up the additive Schwarz preconditioner.

  On first setup this determines the subdomains (from a DM decomposition,
  user-provided index sets, or one subdomain per process), grows them by the
  requested overlap, creates one sequential KSP per local subdomain, and
  builds the scatters between the global vector, the per-process "local"
  vector (union of all local subdomains, osm->lis), and the per-block work
  vectors.  On subsequent setups it reuses or recreates the submatrices
  depending on whether the nonzero pattern changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscBool      flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m; /* NOTE(review): shadows the outer m declared above — presumably intentional scoping, but verify */

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        }
        /* PCASMSetLocalSubdomains() took references; drop ours and free the DM's output arrays */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      PetscCall(MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* Preserve the pre-overlap index sets as is_local before MatIncreaseOverlap() modifies osm->is */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->is_local));
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i],&osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i],osm->is_local[i]));
        } else {
          /* no overlap: is_local[i] can simply alias is[i] */
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc,&prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap));
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) {
          PetscCall(ISSort(osm->is_local[i]));
        }
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL,prefix,"-pc_asm_print_subdomains",&flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->ksp));
      if (domain_dm) {
        PetscCall(PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n"));
      }
      for (i=0; i<osm->n_local_true; i++) {
        /* each block solver is sequential and defaults to KSPPREONLY; options use the "<prefix>sub_" prefix */
        PetscCall(KSPCreate(PETSC_COMM_SELF,&ksp));
        PetscCall(KSPSetErrorIfNotConverged(ksp,pc->erroriffailure));
        PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1));
        PetscCall(KSPSetType(ksp,KSPPREONLY));
        PetscCall(KSPGetPC(ksp,&subpc));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(KSPSetOptionsPrefix(ksp,prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp,"sub_"));
        if (domain_dm) {
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        PetscCall(PetscFree(domain_dm));
      }
    }

    /* osm->lis = sorted union of all local subdomain index sets (the per-process "local" space) */
    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatDestroyMatrices(osm->n_local_true,&osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
    if (osm->n_local_true > 0) {
      PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
    }
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix));
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]));
      PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix));
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i])));
    }
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat,&vec,NULL));

    PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE),PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      /* restrictive additive Schwarz: prolongations scatter only the non-overlapping parts back */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->lprolongation));
    }
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->y));

    /* osm->restriction: global vector -> osm->lx (all locally needed entries, including ghosts) */
    PetscCall(ISGetLocalSize(osm->lis,&m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
    PetscCall(MatGetVecType(osm->pmat[0],&vtype));
    PetscCall(VecCreate(PETSC_COMM_SELF,&osm->lx));
    PetscCall(VecSetSizes(osm->lx,m,m));
    PetscCall(VecSetType(osm->lx,vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    PetscCall(VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(MatCreateVecs(osm->pmat[i],&osm->x[i],NULL));
      PetscCall(VecDuplicate(osm->x[i],&osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m,&idx_lis));
      /* translate is[i]'s global indices into positions within lis */
      PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis));
      PetscCheck(nout == m,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
      PetscCall(VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i],&m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        /* idx1: positions of is_local[i]'s entries within is[i] (source side of the scatter) */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog));
        PetscCall(PetscMalloc1(m_local,&idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll));

        /* idx2: positions of the same entries within lis (destination side of the scatter) */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
        PetscCall(PetscMalloc1(m_local,&idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* lmats[i] = rows is[i] x columns lis of pmat, used to update later blocks' RHS from earlier solutions */
    IS      *cis;
    PetscInt c;

    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0],&prefix));
  for (i=0; i<osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i],prefix));
    if (!pc->setupcalled) {
      PetscCall(KSPSetFromOptions(osm->ksp[i]));
    }
  }
  PetscFunctionReturn(0);
}
430 
431 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
432 {
433   PC_ASM             *osm = (PC_ASM*)pc->data;
434   PetscInt           i;
435   KSPConvergedReason reason;
436 
437   PetscFunctionBegin;
438   for (i=0; i<osm->n_local_true; i++) {
439     PetscCall(KSPSetUp(osm->ksp[i]));
440     PetscCall(KSPGetConvergedReason(osm->ksp[i],&reason));
441     if (reason == KSP_DIVERGED_PC_FAILED) {
442       pc->failedreason = PC_SUBPC_ERROR;
443     }
444   }
445   PetscFunctionReturn(0);
446 }
447 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

  Pipeline: scatter x into the local (ghosted) RHS lx, restrict lx into each
  block's RHS x[i], solve each block, accumulate block solutions into ly, and
  finally add ly back into the global y.  For PC_COMPOSITE_MULTIPLICATIVE the
  RHS of block i+1 is additionally updated with the solution computed so far.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(VecSet(y, 0.0));
    PetscCall(VecSet(osm->ly, 0.0));

    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i],0));
      PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
      PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
      PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping i-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      }

      if (i < n_local_true-1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly (Gauss-Seidel-like sweep over blocks) */
          PetscCall(MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]));
          PetscCall(VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]));
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
516 
/*
  PCMatApply_ASM - Applies the preconditioner to all columns of X at once:
  Y = M^{-1} X, using KSPMatSolve() on the single local block.

  Only the single-subdomain-per-process case is implemented; the column
  vectors are scattered one at a time into a dense work matrix Z, solved in
  one KSPMatSolve() call, and scattered back column by column.
*/
static PetscErrorCode PCMatApply_ASM(PC pc,Mat X,Mat Y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  Mat            Z,W;
  Vec            x;
  PetscInt       i,m,N;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }
  /* Z is (block size) x (number of RHS columns) */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(MatZeroEntries(Y));
    PetscCall(VecSet(osm->ly, 0.0));

    /* gather each RHS column of X into the corresponding column of Z */
    for (i = 0; i < N; ++i) {
      PetscCall(MatDenseGetColumnVecRead(X, i, &x));
      /* copy the global RHS to local RHS including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
      /* restrict local RHS to the overlapping 0-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
    }
    PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
    /* solve the overlapping 0-block */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W,0));
    PetscCall(MatDestroy(&Z));

    /* scatter each solution column of W back into the corresponding column of Y */
    for (i = 0; i < N; ++i) {
      PetscCall(VecSet(osm->ly, 0.0));
      PetscCall(MatDenseGetColumnVecRead(W, i, &x));
      if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping 0-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      }
      PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
      /* add the local solution to the global solution including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
    }
    PetscCall(MatDestroy(&W));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
590 
/*
  PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
  preconditioner: y = M^{-T} x.

  Same scatter/solve/gather pipeline as PCApply_ASM(), but each block uses
  KSPSolveTranspose(), and the RESTRICT/INTERPOLATE roles of the scatter
  modes are swapped because the three factors are transposed.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block (transposed system) */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i],pc,osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscFunctionReturn(0);
}
653 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  /* Reset (not destroy) the per-block solvers so they can be reused by a subsequent PCSetUp() */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(KSPReset(osm->ksp[i]));
    }
  }
  /* Free the extracted subdomain matrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
    }
  }
  /* osm->lrestriction also serves as the flag that the scatters/work vectors were ever built */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      /* lprolongation is only allocated when non-overlapping is_local subdomains were supplied */
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));

  }
  PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* lmats exist only for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
  }

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}
698 
699 static PetscErrorCode PCDestroy_ASM(PC pc)
700 {
701   PC_ASM         *osm = (PC_ASM*)pc->data;
702   PetscInt       i;
703 
704   PetscFunctionBegin;
705   PetscCall(PCReset_ASM(pc));
706   if (osm->ksp) {
707     for (i=0; i<osm->n_local_true; i++) {
708       PetscCall(KSPDestroy(&osm->ksp[i]));
709     }
710     PetscCall(PetscFree(osm->ksp));
711   }
712   PetscCall(PetscFree(pc->data));
713 
714   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",NULL));
715   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",NULL));
716   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",NULL));
717   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",NULL));
718   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",NULL));
719   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",NULL));
720   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",NULL));
721   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",NULL));
722   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",NULL));
723   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",NULL));
724   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",NULL));
725   PetscFunctionReturn(0);
726 }
727 
728 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
729 {
730   PC_ASM         *osm = (PC_ASM*)pc->data;
731   PetscInt       blocks,ovl;
732   PetscBool      flg;
733   PCASMType      asmtype;
734   PCCompositeType loctype;
735   char           sub_mat_type[256];
736 
737   PetscFunctionBegin;
738   PetscOptionsHeadBegin(PetscOptionsObject,"Additive Schwarz options");
739   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg));
740   PetscCall(PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg));
741   if (flg) {
742     PetscCall(PCASMSetTotalSubdomains(pc,blocks,NULL,NULL));
743     osm->dm_subdomains = PETSC_FALSE;
744   }
745   PetscCall(PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg));
746   if (flg) {
747     PetscCall(PCASMSetLocalSubdomains(pc,blocks,NULL,NULL));
748     osm->dm_subdomains = PETSC_FALSE;
749   }
750   PetscCall(PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg));
751   if (flg) {
752     PetscCall(PCASMSetOverlap(pc,ovl));
753     osm->dm_subdomains = PETSC_FALSE;
754   }
755   flg  = PETSC_FALSE;
756   PetscCall(PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg));
757   if (flg) PetscCall(PCASMSetType(pc,asmtype));
758   flg  = PETSC_FALSE;
759   PetscCall(PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg));
760   if (flg) PetscCall(PCASMSetLocalType(pc,loctype));
761   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg));
762   if (flg) {
763     PetscCall(PCASMSetSubMatType(pc,sub_mat_type));
764   }
765   PetscOptionsHeadEnd();
766   PetscFunctionReturn(0);
767 }
768 
769 /*------------------------------------------------------------------------------------*/
770 
/* Record user-supplied (overlapping is[] and/or non-overlapping is_local[]) subdomain index sets.
   Must be called before PCSetUp(); takes its own references on all incoming IS objects. */
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  PetscCheck(n >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %" PetscInt_FMT,n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is),PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the old ones, in case the caller
       passes (some of) the very objects already stored in osm->is/osm->is_local */
    if (is) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n,&osm->is));
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n,&osm->is_local));
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* Only is_local was given: derive osm->is from it */
        PetscCall(PetscMalloc1(osm->n_local_true,&osm->is));
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i],&osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i],osm->is[i]));
          } else {
            /* share the object; take an extra reference since both arrays will be destroyed */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
817 
818 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
819 {
820   PC_ASM         *osm = (PC_ASM*)pc->data;
821   PetscMPIInt    rank,size;
822   PetscInt       n;
823 
824   PetscFunctionBegin;
825   PetscCheck(N >= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %" PetscInt_FMT,N);
826   PetscCheck(!is && !is_local,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
827 
828   /*
829      Split the subdomains equally among all processors
830   */
831   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
832   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
833   n    = N/size + ((N % size) > rank);
834   PetscCheck(n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT,(int)rank,(int)size,N);
835   PetscCheck(!pc->setupcalled || n == osm->n_local_true,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
836   if (!pc->setupcalled) {
837     PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
838 
839     osm->n_local_true = n;
840     osm->is           = NULL;
841     osm->is_local     = NULL;
842   }
843   PetscFunctionReturn(0);
844 }
845 
846 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
847 {
848   PC_ASM *osm = (PC_ASM*)pc->data;
849 
850   PetscFunctionBegin;
851   PetscCheck(ovl >= 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
852   PetscCheck(!pc->setupcalled || ovl == osm->overlap,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
853   if (!pc->setupcalled) osm->overlap = ovl;
854   PetscFunctionReturn(0);
855 }
856 
857 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
858 {
859   PC_ASM *osm = (PC_ASM*)pc->data;
860 
861   PetscFunctionBegin;
862   osm->type     = type;
863   osm->type_set = PETSC_TRUE;
864   PetscFunctionReturn(0);
865 }
866 
867 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
868 {
869   PC_ASM *osm = (PC_ASM*)pc->data;
870 
871   PetscFunctionBegin;
872   *type = osm->type;
873   PetscFunctionReturn(0);
874 }
875 
876 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
877 {
878   PC_ASM *osm = (PC_ASM *) pc->data;
879 
880   PetscFunctionBegin;
881   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
882   osm->loctype = type;
883   PetscFunctionReturn(0);
884 }
885 
886 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
887 {
888   PC_ASM *osm = (PC_ASM *) pc->data;
889 
890   PetscFunctionBegin;
891   *type = osm->loctype;
892   PetscFunctionReturn(0);
893 }
894 
895 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
896 {
897   PC_ASM *osm = (PC_ASM*)pc->data;
898 
899   PetscFunctionBegin;
900   osm->sort_indices = doSort;
901   PetscFunctionReturn(0);
902 }
903 
904 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
905 {
906   PC_ASM         *osm = (PC_ASM*)pc->data;
907 
908   PetscFunctionBegin;
909   PetscCheck(osm->n_local_true >= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
910 
911   if (n_local) *n_local = osm->n_local_true;
912   if (first_local) {
913     PetscCallMPI(MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc)));
914     *first_local -= osm->n_local_true;
915   }
916   if (ksp) *ksp   = osm->ksp;
917   PetscFunctionReturn(0);
918 }
919 
920 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
921 {
922   PC_ASM         *osm = (PC_ASM*)pc->data;
923 
924   PetscFunctionBegin;
925   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
926   PetscValidPointer(sub_mat_type,2);
927   *sub_mat_type = osm->sub_mat_type;
928   PetscFunctionReturn(0);
929 }
930 
931 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
932 {
933   PC_ASM            *osm = (PC_ASM*)pc->data;
934 
935   PetscFunctionBegin;
936   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
937   PetscCall(PetscFree(osm->sub_mat_type));
938   PetscCall(PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type));
939   PetscFunctionReturn(0);
940 }
941 
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on pc

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Options Database Key:
    To set the number of local subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_local_blocks <blks> - Sets local blocks

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method)

    If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
    no code to handle that case.

    Level: advanced

.seealso: `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
  PetscFunctionReturn(0);
}
985 
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on pc

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is or is_local.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.
    (NOTE(review): since is and is_local must currently be NULL, this paragraph applies
    only once passing index sets here becomes supported.)

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.seealso: `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
  PetscFunctionReturn(0);
}
1030 
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on pc

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast, but not scalable,
    use the option -mat_increase_overlap_scalable when the problem and number of processes is large.

    Note that one can define initial index sets with any overlap via
    PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.seealso: `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
  PetscFunctionReturn(0);
}
1079 
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on pc

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes:
    If the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
    to limit the local processor interpolation

    Level: intermediate

.seealso: `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
  PetscFunctionReturn(0);
}
1116 
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on pc

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.seealso: `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
  PetscFunctionReturn(0);
}
1151 
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on pc

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Note:
  Only PC_COMPOSITE_ADDITIVE and PC_COMPOSITE_MULTIPLICATIVE are supported; other values generate an error.

  Level: intermediate

.seealso: `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASM`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}
1180 
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on pc

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(0);
}
1211 
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on pc

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Note:
    Sorting is enabled by default (see PCCreate_ASM()).

    Level: intermediate

.seealso: `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
          `PCASMCreateSubdomains2D()`
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
  PetscFunctionReturn(0);
}
1234 
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on pc iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.

   Level: advanced

.seealso: `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
          `PCASMCreateSubdomains2D()`,
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
  PetscFunctionReturn(0);
}
1270 
1271 /* -------------------------------------------------------------------------------------*/
1272 /*MC
1273    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1274            its own KSP object.
1275 
1276    Options Database Keys:
1277 +  -pc_asm_blocks <blks> - Sets total blocks
1278 .  -pc_asm_overlap <ovl> - Sets overlap
1279 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
-  -pc_asm_local_type [additive, multiplicative] - Sets local solver composition type, default is additive
1281 
1282      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1283       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1284       -pc_asm_type basic to use the standard ASM.
1285 
1286    Notes:
1287     Each processor can have one or more blocks, but a block cannot be shared by more
1288      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1289 
1290      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1291         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1292 
1293      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1294          and set the options directly on the resulting KSP object (you can access its PC
1295          with KSPGetPC())
1296 
1297    Level: beginner
1298 
1299     References:
1300 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1301      Courant Institute, New York University Technical report
1302 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1303     Cambridge University Press.
1304 
1305 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`,
1306           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1307           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1308 
1309 M*/
1310 
/* Constructor for the PCASM preconditioner: allocates the PC_ASM context,
   fills in the defaults, wires up the PC method table, and composes the
   PCASM-specific setter/getter implementations. */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM         *osm;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc,&osm));

  /* defaults: subdomain count decided later, overlap 1, restricted additive Schwarz,
     additive local composition, sorted indices */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = NULL;
  osm->restriction       = NULL;
  osm->lprolongation     = NULL;
  osm->lrestriction      = NULL;
  osm->x                 = NULL;
  osm->y                 = NULL;
  osm->is                = NULL;
  osm->is_local          = NULL;
  osm->mat               = NULL;
  osm->pmat              = NULL;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* these are removed again (composed with NULL) in PCDestroy_ASM() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM));
  PetscFunctionReturn(0);
}
1363 
1364 /*@C
1365    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1366    preconditioner for any problem on a general grid.
1367 
1368    Collective
1369 
1370    Input Parameters:
1371 +  A - The global matrix operator
1372 -  n - the number of local blocks
1373 
   Output Parameter:
1375 .  outis - the array of index sets defining the subdomains
1376 
1377    Level: advanced
1378 
1379    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1380     from these if you use PCASMSetLocalSubdomains()
1381 
1382     In the Fortran version you must provide the array outis[] already allocated of length n.
1383 
1384 .seealso: `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1385 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  PetscCheck(n >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %" PetscInt_FMT,n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A,&prefix));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(MatGetBlockSize(A,&bs));
  /* The local row range must be block aligned, otherwise the chunking below would split a block */
  PetscCheck(rstart/bs*bs == rstart && rend/bs*bs == rend,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT,rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop));
  if (hasop) {
    PetscCall(MatGetDiagonalBlock(A,&Ad));
  }
  if (Ad) {
    /* detect (S)BAIJ storage: then the adjacency below is built per block row, not per point row */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF,&mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* CURRENT and SQUARE are trivial partitioners; only run a real graph partitioner */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match));
    if (!match) {
      PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match));
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      PetscCall(MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=NULL,*jja=NULL;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na+1,&iia));
        PetscCall(PetscMalloc1(nnz,&jja));
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; iia/jja ownership passes to adj (freed by MatDestroy) */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj));
        PetscCall(MatPartitioningSetAdjacency(mpart,adj));
        PetscCall(MatPartitioningSetNParts(mpart,n));
        PetscCall(MatPartitioningApply(mpart,&ispart));
        PetscCall(ISPartitioningToNumbering(ispart,&isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* caller receives ownership of the IS array; free with PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(n,&is));
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* distribute the mbs % n leftover blocks over the first subdomains */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]));
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n,&count));
    PetscCall(ISPartitioningCount(ispart,n,count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb,&nidx));
    PetscCall(PetscMalloc1(nidx,&indices));
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb,&numbering));
    /* sort rows by partition number so each subdomain's rows become contiguous in indices[] */
    PetscCall(PetscSortIntWithPermutation(nidx,numbering,indices));
    PetscCall(ISRestoreIndices(isnumb,&numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs consecutive point indices */
      PetscCall(PetscMalloc1(nidx*bs,&newidx));
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));

  }
  PetscFunctionReturn(0);
}
1538 
1539 /*@C
1540    PCASMDestroySubdomains - Destroys the index sets created with
1541    PCASMCreateSubdomains(). Should be called after setting subdomains
1542    with PCASMSetLocalSubdomains().
1543 
1544    Collective
1545 
1546    Input Parameters:
1547 +  n - the number of index sets
1548 .  is - the array of index sets
1549 -  is_local - the array of local index sets, can be NULL
1550 
1551    Level: advanced
1552 
1553 .seealso: `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1554 @*/
1555 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1556 {
1557   PetscInt       i;
1558 
1559   PetscFunctionBegin;
1560   if (n <= 0) PetscFunctionReturn(0);
1561   if (is) {
1562     PetscValidPointer(is,2);
1563     for (i=0; i<n; i++) PetscCall(ISDestroy(&is[i]));
1564     PetscCall(PetscFree(is));
1565   }
1566   if (is_local) {
1567     PetscValidPointer(is_local,3);
1568     for (i=0; i<n; i++) PetscCall(ISDestroy(&is_local[i]));
1569     PetscCall(PetscFree(is_local));
1570   }
1571   PetscFunctionReturn(0);
1572 }
1573 
1574 /*@
1575    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1576    preconditioner for a two-dimensional problem on a regular grid.
1577 
1578    Not Collective
1579 
1580    Input Parameters:
1581 +  m   - the number of mesh points in the x direction
1582 .  n   - the number of mesh points in the y direction
1583 .  M   - the number of subdomains in the x direction
1584 .  N   - the number of subdomains in the y direction
1585 .  dof - degrees of freedom per node
1586 -  overlap - overlap in mesh lines
1587 
1588    Output Parameters:
1589 +  Nsub - the number of subdomains created
1590 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1591 -  is_local - array of index sets defining non-overlapping subdomains
1592 
1593    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1595    preconditioners.  More general related routines are
1596    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1597 
1598    Level: advanced
1599 
1600 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1601           `PCASMSetOverlap()`
1602 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  PetscCheck(dof == 1,PETSC_COMM_SELF,PETSC_ERR_SUP,"dof must be 1");

  *Nsub     = N*M;
  /* caller receives ownership of both arrays; free with PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(*Nsub,is));
  PetscCall(PetscMalloc1(*Nsub,is_local));
  ystart    = 0;
  loc_outer = 0;
  /* sweep the M x N subdomain grid row by row (y outer, x inner) */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend by the overlap, clamped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      PetscCall(PetscMalloc1(nidx,&idx));
      loc    = 0;
      /* natural (row-major) numbering of the mesh: point (jj,ii) has index m*ii + jj */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer));
      if (overlap == 0) {
        /* no overlap: local and overlapping subdomains coincide, so share the IS (extra reference) */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* refill idx with the non-overlapping interior only; nidx >= loc so the buffer is large enough */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(0);
}
1656 
1657 /*@C
1658     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1659     only) for the additive Schwarz preconditioner.
1660 
1661     Not Collective
1662 
1663     Input Parameter:
1664 .   pc - the preconditioner context
1665 
1666     Output Parameters:
1667 +   n - if requested, the number of subdomains for this processor (default value = 1)
1668 .   is - if requested, the index sets that define the subdomains for this processor
1669 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1670 
1671     Notes:
1672     The IS numbering is in the parallel, global numbering of the vector.
1673 
1674     Level: advanced
1675 
1676 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1677           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1678 @*/
1679 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1680 {
1681   PC_ASM         *osm = (PC_ASM*)pc->data;
1682   PetscBool      match;
1683 
1684   PetscFunctionBegin;
1685   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1686   if (n) PetscValidIntPointer(n,2);
1687   if (is) PetscValidPointer(is,3);
1688   if (is_local) PetscValidPointer(is_local,4);
1689   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1690   PetscCheck(match,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1691   if (n) *n = osm->n_local_true;
1692   if (is) *is = osm->is;
1693   if (is_local) *is_local = osm->is_local;
1694   PetscFunctionReturn(0);
1695 }
1696 
1697 /*@C
1698     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1699     only) for the additive Schwarz preconditioner.
1700 
1701     Not Collective
1702 
1703     Input Parameter:
1704 .   pc - the preconditioner context
1705 
1706     Output Parameters:
1707 +   n - if requested, the number of matrices for this processor (default value = 1)
1708 -   mat - if requested, the matrices
1709 
1710     Level: advanced
1711 
1712     Notes:
1713     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks())
1714 
1715            Usually one would use PCSetModifySubMatrices() to change the submatrices in building the preconditioner.
1716 
1717 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1718           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1719 @*/
1720 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1721 {
1722   PC_ASM         *osm;
1723   PetscBool      match;
1724 
1725   PetscFunctionBegin;
1726   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1727   if (n) PetscValidIntPointer(n,2);
1728   if (mat) PetscValidPointer(mat,3);
1729   PetscCheck(pc->setupcalled,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1730   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1731   if (!match) {
1732     if (n) *n = 0;
1733     if (mat) *mat = NULL;
1734   } else {
1735     osm = (PC_ASM*)pc->data;
1736     if (n) *n = osm->n_local_true;
1737     if (mat) *mat = osm->pmat;
1738   }
1739   PetscFunctionReturn(0);
1740 }
1741 
1742 /*@
1743     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1744 
1745     Logically Collective
1746 
1747     Input Parameters:
1748 +   pc  - the preconditioner
1749 -   flg - boolean indicating whether to use subdomains defined by the DM
1750 
1751     Options Database Key:
1752 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1753 
1754     Level: intermediate
1755 
1756     Notes:
1757     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1758     so setting either of the first two effectively turns the latter off.
1759 
1760 .seealso: `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1761           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1762 @*/
1763 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1764 {
1765   PC_ASM         *osm = (PC_ASM*)pc->data;
1766   PetscBool      match;
1767 
1768   PetscFunctionBegin;
1769   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1770   PetscValidLogicalCollectiveBool(pc,flg,2);
1771   PetscCheck(!pc->setupcalled,((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1772   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1773   if (match) {
1774     osm->dm_subdomains = flg;
1775   }
1776   PetscFunctionReturn(0);
1777 }
1778 
1779 /*@
1780     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1781     Not Collective
1782 
1783     Input Parameter:
1784 .   pc  - the preconditioner
1785 
1786     Output Parameter:
1787 .   flg - boolean indicating whether to use subdomains defined by the DM
1788 
1789     Level: intermediate
1790 
1791 .seealso: `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1792           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1793 @*/
1794 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1795 {
1796   PC_ASM         *osm = (PC_ASM*)pc->data;
1797   PetscBool      match;
1798 
1799   PetscFunctionBegin;
1800   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1801   PetscValidBoolPointer(flg,2);
1802   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1803   if (match) *flg = osm->dm_subdomains;
1804   else *flg = PETSC_FALSE;
1805   PetscFunctionReturn(0);
1806 }
1807 
1808 /*@
1809      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1810 
1811    Not Collective
1812 
1813    Input Parameter:
1814 .  pc - the PC
1815 
1816    Output Parameter:
.  sub_mat_type - name of matrix type
1818 
1819    Level: advanced
1820 
1821 .seealso: `PCASMSetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1822 @*/
1823 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1824 {
1825   PetscFunctionBegin;
1826   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1827   PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));
1828   PetscFunctionReturn(0);
1829 }
1830 
1831 /*@
1832      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1833 
   Logically Collective
1835 
1836    Input Parameters:
1837 +  pc             - the PC object
1838 -  sub_mat_type   - matrix type
1839 
1840    Options Database Key:
1841 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1842 
1843    Notes:
1844    See "${PETSC_DIR}/include/petscmat.h" for available types
1845 
1846   Level: advanced
1847 
1848 .seealso: `PCASMGetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1849 @*/
/* Sets the matrix type used for the ASM subdomain solves; a no-op for non-PCASM preconditioners. */
PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCASMSetSubMatType_ASM when registered; PetscTryMethod ignores other PC types */
  PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));
  PetscFunctionReturn(0);
}
1857