xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 #include "petsc/private/matimpl.h"
15 
/*
  PCView_ASM - Prints information about the additive Schwarz preconditioner.

  Supports ASCII and string viewers.  With an ASCII viewer the default format
  prints a summary plus the first block's KSP on rank 0 only; the
  PETSC_VIEWER_ASCII_INFO_DETAIL format prints every local block's KSP on
  every rank using synchronized output.

  Note: the viewer calls below are collective on the viewer's communicator;
  Get/RestoreSubViewer and Push/Pop calls must be executed by all ranks in
  the same order, which is why they sit outside the rank-0-only branch.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM            *osm = (PC_ASM*)pc->data;
  PetscMPIInt       rank;
  PetscInt          i,bsz;
  PetscBool         iascii,isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char        *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  if (iascii) {
    /* fallback labels, shown until PCSetUp_ASM() has determined the overlap and the global block count */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %" PetscInt_FMT,osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %" PetscInt_FMT,osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
    PetscCall(PetscViewerGetFormat(viewer,&format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n",prefix?prefix:""));
        /* all ranks must obtain/restore the subviewer (collective), but only rank 0 prints with it */
        PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(viewer));
          PetscCall(KSPView(osm->ksp[0],sviewer));
          PetscCall(PetscViewerASCIIPopTab(viewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      }
    } else {
      /* detailed format: each rank prints the size and solver of every one of its local blocks */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %" PetscInt_FMT "\n",(int)rank,osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i],&bsz));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n",(int)rank,i,bsz));
        PetscCall(KSPView(osm->ksp[i],sviewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    /* compact one-line summary plus (if already created) the first block's KSP */
    PetscCall(PetscViewerStringSPrintf(viewer," blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0],sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  }
  PetscFunctionReturn(0);
}
79 
80 static PetscErrorCode PCASMPrintSubdomains(PC pc)
81 {
82   PC_ASM         *osm = (PC_ASM*)pc->data;
83   const char     *prefix;
84   char           fname[PETSC_MAX_PATH_LEN+1];
85   PetscViewer    viewer, sviewer;
86   char           *s;
87   PetscInt       i,j,nidx;
88   const PetscInt *idx;
89   PetscMPIInt    rank, size;
90 
91   PetscFunctionBegin;
92   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
93   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
94   PetscCall(PCGetOptionsPrefix(pc,&prefix));
95   PetscCall(PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,sizeof(fname),NULL));
96   if (fname[0] == 0) PetscCall(PetscStrcpy(fname,"stdout"));
97   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer));
98   for (i=0; i<osm->n_local; i++) {
99     if (i < osm->n_local_true) {
100       PetscCall(ISGetLocalSize(osm->is[i],&nidx));
101       PetscCall(ISGetIndices(osm->is[i],&idx));
102       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
103 #define len  16*(nidx+1)+512
104       PetscCall(PetscMalloc1(len,&s));
105       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
106 #undef len
107       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
108       for (j=0; j<nidx; j++) {
109         PetscCall(PetscViewerStringSPrintf(sviewer,"%" PetscInt_FMT " ",idx[j]));
110       }
111       PetscCall(ISRestoreIndices(osm->is[i],&idx));
112       PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
113       PetscCall(PetscViewerDestroy(&sviewer));
114       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
115       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
116       PetscCall(PetscViewerFlush(viewer));
117       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
118       PetscCall(PetscFree(s));
119       if (osm->is_local) {
120         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
121 #define len  16*(nidx+1)+512
122         PetscCall(PetscMalloc1(len, &s));
123         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
124 #undef len
125         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
126         PetscCall(ISGetLocalSize(osm->is_local[i],&nidx));
127         PetscCall(ISGetIndices(osm->is_local[i],&idx));
128         for (j=0; j<nidx; j++) {
129           PetscCall(PetscViewerStringSPrintf(sviewer,"%" PetscInt_FMT " ",idx[j]));
130         }
131         PetscCall(ISRestoreIndices(osm->is_local[i],&idx));
132         PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
133         PetscCall(PetscViewerDestroy(&sviewer));
134         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
135         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
136         PetscCall(PetscViewerFlush(viewer));
137         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
138         PetscCall(PetscFree(s));
139       }
140     } else {
141       /* Participate in collective viewer calls. */
142       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143       PetscCall(PetscViewerFlush(viewer));
144       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145       /* Assume either all ranks have is_local or none do. */
146       if (osm->is_local) {
147         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
148         PetscCall(PetscViewerFlush(viewer));
149         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
150       }
151     }
152   }
153   PetscCall(PetscViewerFlush(viewer));
154   PetscCall(PetscViewerDestroy(&viewer));
155   PetscFunctionReturn(0);
156 }
157 
/*
  PCSetUp_ASM - Builds everything the additive Schwarz preconditioner needs:

    - the index sets describing the (possibly overlapping) local subdomains,
    - the sequential submatrices of pc->pmat for those subdomains,
    - one sequential KSP per local subdomain, and
    - the VecScatter contexts between the global vector, the process-local
      work vectors (lx/ly over the union of the local subdomains), and the
      per-block vectors (x[i]/y[i]).

  On repeated setups only the submatrices are re-extracted; they are reused
  in place when the nonzero pattern is unchanged.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscBool      flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m; /* NOTE(review): shadows the function-scope m declared above */

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        /* release the decomposition output (PCASMSetLocalSubdomains() presumably keeps its own references — confirm against its implementation) */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      /* single reduction computing both max and sum of the per-rank block counts */
      PetscCall(MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is));
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* remember the non-overlapping subdomains before MatIncreaseOverlap() below modifies osm->is */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->is_local));
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i],&osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i],osm->is_local[i]));
        } else {
          /* zero overlap: the local IS can simply share the overlapping IS */
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc,&prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap));
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) {
          PetscCall(ISSort(osm->is_local[i]));
        }
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL,prefix,"-pc_asm_print_subdomains",&flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->ksp));
      if (domain_dm) {
        PetscCall(PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n"));
      }
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF,&ksp));
        PetscCall(KSPSetErrorIfNotConverged(ksp,pc->erroriffailure));
        PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1));
        PetscCall(KSPSetType(ksp,KSPPREONLY)); /* default: one application of the sub PC, overridable via -sub_ksp_type */
        PetscCall(KSPGetPC(ksp,&subpc));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(KSPSetOptionsPrefix(ksp,prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp,"sub_"));
        if (domain_dm) {
          /* attach the subdomain DM (for its problem description) without letting it drive the solve */
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) PetscCall(PetscFree(domain_dm));
    }

    /* lis = sorted, duplicate-free union of all overlapping local subdomains */
    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m)); /* NOTE(review): writes the shadowing m; the value is not used before this scope ends */

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatDestroyMatrices(osm->n_local_true,&osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
    if (osm->n_local_true > 0) {
      PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
    }
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    /* give freshly created submatrices the same options prefix as the global matrix */
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix));
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]));
      PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix));
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i])));
    }
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat,&vec,NULL));

    PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE),PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      /* restricted additive Schwarz: prolongation scatters pick only the non-overlapping entries */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->lprolongation));
    }
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->y));

    /* lx/ly are sequential work vectors covering the union of local subdomains;
       osm->restriction scatters the global vector onto them */
    PetscCall(ISGetLocalSize(osm->lis,&m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
    PetscCall(MatGetVecType(osm->pmat[0],&vtype)); /* NOTE(review): assumes n_local_true >= 1 here — confirm */
    PetscCall(VecCreate(PETSC_COMM_SELF,&osm->lx));
    PetscCall(VecSetSizes(osm->lx,m,m));
    PetscCall(VecSetType(osm->lx,vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    PetscCall(VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(MatCreateVecs(osm->pmat[i],&osm->x[i],NULL));
      PetscCall(VecDuplicate(osm->x[i],&osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m,&idx_lis));
      /* translate is[i]'s global indices into positions within lis */
      PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis));
      PetscCheck(nout == m,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
      PetscCall(VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i],&m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        /* idx1: positions of the non-overlapping entries within block i's overlapping IS */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog));
        PetscCall(PetscMalloc1(m_local,&idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll));

        /* idx2: positions of the same entries within the union lis */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
        PetscCall(PetscMalloc1(m_local,&idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* lmats[c]: rows of block c against the full union of columns, used to
       update later blocks' RHS with earlier blocks' solutions in PCApply_ASM() */
    IS      *cis;
    PetscInt c;

    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0],&prefix)); /* NOTE(review): assumes n_local_true >= 1 — confirm */
  for (i=0; i<osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i],prefix));
    if (!pc->setupcalled) {
      PetscCall(KSPSetFromOptions(osm->ksp[i]));
    }
  }
  PetscFunctionReturn(0);
}
426 
427 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
428 {
429   PC_ASM             *osm = (PC_ASM*)pc->data;
430   PetscInt           i;
431   KSPConvergedReason reason;
432 
433   PetscFunctionBegin;
434   for (i=0; i<osm->n_local_true; i++) {
435     PetscCall(KSPSetUp(osm->ksp[i]));
436     PetscCall(KSPGetConvergedReason(osm->ksp[i],&reason));
437     if (reason == KSP_DIVERGED_PC_FAILED) {
438       pc->failedreason = PC_SUBPC_ERROR;
439     }
440   }
441   PetscFunctionReturn(0);
442 }
443 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

  The global RHS is scattered to the per-process work vector lx, restricted to
  each overlapping block, solved block-by-block, and the block solutions are
  accumulated into ly and then scattered-added back into y.  For
  PC_COMPOSITE_MULTIPLICATIVE, each later block's RHS is additionally updated
  with the contribution of the already-computed local solution.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(VecSet(y, 0.0));
    PetscCall(VecSet(osm->ly, 0.0));

    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i],0));
      PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
      PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
      PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping i-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      }

      if (i < n_local_true-1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] = x[i+1] - lmats[i+1] * ly  (y[i+1] is used as scratch here) */
          PetscCall(MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]));
          PetscCall(VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]));
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
512 
/*
  PCMatApply_ASM - Applies the ASM preconditioner to a block of right-hand
  sides stored as the columns of the dense matrix X, producing Y.

  Only n_local_true <= 1 is supported: the scatters are applied column by
  column, but the block solve is done with a single KSPMatSolve() on the
  0-block instead of one KSPSolve() per column.
*/
static PetscErrorCode PCMatApply_ASM(PC pc,Mat X,Mat Y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  Mat            Z,W;  /* Z: per-block RHS columns, W: per-block solution columns */
  Vec            x;
  PetscInt       i,m,N;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }
  /* NOTE(review): with n_local_true == 0 the accesses to osm->x[0], osm->ksp[0],
     and osm->lrestriction[0] below look invalid — confirm callers guarantee one block */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N)); /* N = number of right-hand sides (columns) */
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(MatZeroEntries(Y));
    PetscCall(VecSet(osm->ly, 0.0));

    /* restrict each column of X into the corresponding column of Z */
    for (i = 0; i < N; ++i) {
      PetscCall(MatDenseGetColumnVecRead(X, i, &x));
      /* copy the global RHS to local RHS including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
      /* restrict local RHS to the overlapping 0-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
    }
    PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
    /* solve the overlapping 0-block for all right-hand sides at once */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W,0));
    PetscCall(MatDestroy(&Z));

    /* prolong each column of W back into the corresponding column of Y */
    for (i = 0; i < N; ++i) {
      PetscCall(VecSet(osm->ly, 0.0));
      PetscCall(MatDenseGetColumnVecRead(W, i, &x));
      if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping 0-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      }
      PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
      /* add the local solution to the global solution including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
    }
    PetscCall(MatDestroy(&W));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
586 
587 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
588 {
589   PC_ASM         *osm = (PC_ASM*)pc->data;
590   PetscInt       i,n_local_true = osm->n_local_true;
591   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
592 
593   PetscFunctionBegin;
594   /*
595      Support for limiting the restriction or interpolation to only local
596      subdomain values (leaving the other values 0).
597 
598      Note: these are reversed from the PCApply_ASM() because we are applying the
599      transpose of the three terms
600   */
601 
602   if (!(osm->type & PC_ASM_INTERPOLATE)) {
603     forward = SCATTER_FORWARD_LOCAL;
604     /* have to zero the work RHS since scatter may leave some slots empty */
605     PetscCall(VecSet(osm->lx, 0.0));
606   }
607   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
608 
609   /* zero the global and the local solutions */
610   PetscCall(VecSet(y, 0.0));
611   PetscCall(VecSet(osm->ly, 0.0));
612 
613   /* Copy the global RHS to local RHS including the ghost nodes */
614   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
615   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
616 
617   /* Restrict local RHS to the overlapping 0-block RHS */
618   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
619   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
620 
621   /* do the local solves */
622   for (i = 0; i < n_local_true; ++i) {
623 
624     /* solve the overlapping i-block */
625     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));
626     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
627     PetscCall(KSPCheckSolve(osm->ksp[i],pc,osm->y[i]));
628     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));
629 
630     if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
631       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
632       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
633     } else { /* interpolate the overlapping i-block solution to the local solution */
634       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
635       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
636     }
637 
638     if (i < n_local_true-1) {
639       /* Restrict local RHS to the overlapping (i+1)-block RHS */
640       PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
641       PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
642     }
643   }
644   /* Add the local solution to the global solution including the ghost nodes */
645   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
646   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
647   PetscFunctionReturn(0);
648 }
649 
650 static PetscErrorCode PCReset_ASM(PC pc)
651 {
652   PC_ASM         *osm = (PC_ASM*)pc->data;
653   PetscInt       i;
654 
655   PetscFunctionBegin;
656   if (osm->ksp) {
657     for (i=0; i<osm->n_local_true; i++) {
658       PetscCall(KSPReset(osm->ksp[i]));
659     }
660   }
661   if (osm->pmat) {
662     if (osm->n_local_true > 0) {
663       PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
664     }
665   }
666   if (osm->lrestriction) {
667     PetscCall(VecScatterDestroy(&osm->restriction));
668     for (i=0; i<osm->n_local_true; i++) {
669       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
670       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
671       PetscCall(VecDestroy(&osm->x[i]));
672       PetscCall(VecDestroy(&osm->y[i]));
673     }
674     PetscCall(PetscFree(osm->lrestriction));
675     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
676     PetscCall(PetscFree(osm->x));
677     PetscCall(PetscFree(osm->y));
678 
679   }
680   PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
681   PetscCall(ISDestroy(&osm->lis));
682   PetscCall(VecDestroy(&osm->lx));
683   PetscCall(VecDestroy(&osm->ly));
684   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
685     PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
686   }
687 
688   PetscCall(PetscFree(osm->sub_mat_type));
689 
690   osm->is       = NULL;
691   osm->is_local = NULL;
692   PetscFunctionReturn(0);
693 }
694 
695 static PetscErrorCode PCDestroy_ASM(PC pc)
696 {
697   PC_ASM         *osm = (PC_ASM*)pc->data;
698   PetscInt       i;
699 
700   PetscFunctionBegin;
701   PetscCall(PCReset_ASM(pc));
702   if (osm->ksp) {
703     for (i=0; i<osm->n_local_true; i++) {
704       PetscCall(KSPDestroy(&osm->ksp[i]));
705     }
706     PetscCall(PetscFree(osm->ksp));
707   }
708   PetscCall(PetscFree(pc->data));
709 
710   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",NULL));
711   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",NULL));
712   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",NULL));
713   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",NULL));
714   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",NULL));
715   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",NULL));
716   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",NULL));
717   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",NULL));
718   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",NULL));
719   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",NULL));
720   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",NULL));
721   PetscFunctionReturn(0);
722 }
723 
724 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
725 {
726   PC_ASM         *osm = (PC_ASM*)pc->data;
727   PetscInt       blocks,ovl;
728   PetscBool      flg;
729   PCASMType      asmtype;
730   PCCompositeType loctype;
731   char           sub_mat_type[256];
732 
733   PetscFunctionBegin;
734   PetscOptionsHeadBegin(PetscOptionsObject,"Additive Schwarz options");
735   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg));
736   PetscCall(PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg));
737   if (flg) {
738     PetscCall(PCASMSetTotalSubdomains(pc,blocks,NULL,NULL));
739     osm->dm_subdomains = PETSC_FALSE;
740   }
741   PetscCall(PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg));
742   if (flg) {
743     PetscCall(PCASMSetLocalSubdomains(pc,blocks,NULL,NULL));
744     osm->dm_subdomains = PETSC_FALSE;
745   }
746   PetscCall(PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg));
747   if (flg) {
748     PetscCall(PCASMSetOverlap(pc,ovl));
749     osm->dm_subdomains = PETSC_FALSE;
750   }
751   flg  = PETSC_FALSE;
752   PetscCall(PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg));
753   if (flg) PetscCall(PCASMSetType(pc,asmtype));
754   flg  = PETSC_FALSE;
755   PetscCall(PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg));
756   if (flg) PetscCall(PCASMSetLocalType(pc,loctype));
757   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg));
758   if (flg) PetscCall(PCASMSetSubMatType(pc,sub_mat_type));
759   PetscOptionsHeadEnd();
760   PetscFunctionReturn(0);
761 }
762 
763 /*------------------------------------------------------------------------------------*/
764 
/* Implementation of PCASMSetLocalSubdomains() for PCASM: records the caller's
   (possibly overlapping) subdomain index sets is[] and, optionally, the
   nonoverlapping is_local[] sets. Takes its own references on the index sets. */
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  PetscCheck(n >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %" PetscInt_FMT,n);
  /* After setup, only a call that changes nothing (same n, no new index sets) is tolerated */
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is),PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming sets BEFORE destroying the old ones, in case they are the same objects */
    if (is) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n,&osm->is));
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n,&osm->is_local));
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* No overlapping sets supplied: derive osm->is from the nonoverlapping ones */
        PetscCall(PetscMalloc1(osm->n_local_true,&osm->is));
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i],&osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i],osm->is[i]));
          } else {
            /* Overlap <= 0: osm->is[i] can safely alias osm->is_local[i] */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
811 
812 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
813 {
814   PC_ASM         *osm = (PC_ASM*)pc->data;
815   PetscMPIInt    rank,size;
816   PetscInt       n;
817 
818   PetscFunctionBegin;
819   PetscCheck(N >= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %" PetscInt_FMT,N);
820   PetscCheck(!is && !is_local,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
821 
822   /*
823      Split the subdomains equally among all processors
824   */
825   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
826   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
827   n    = N/size + ((N % size) > rank);
828   PetscCheck(n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT,(int)rank,(int)size,N);
829   PetscCheck(!pc->setupcalled || n == osm->n_local_true,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
830   if (!pc->setupcalled) {
831     PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
832 
833     osm->n_local_true = n;
834     osm->is           = NULL;
835     osm->is_local     = NULL;
836   }
837   PetscFunctionReturn(0);
838 }
839 
840 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
841 {
842   PC_ASM *osm = (PC_ASM*)pc->data;
843 
844   PetscFunctionBegin;
845   PetscCheck(ovl >= 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
846   PetscCheck(!pc->setupcalled || ovl == osm->overlap,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
847   if (!pc->setupcalled) osm->overlap = ovl;
848   PetscFunctionReturn(0);
849 }
850 
851 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
852 {
853   PC_ASM *osm = (PC_ASM*)pc->data;
854 
855   PetscFunctionBegin;
856   osm->type     = type;
857   osm->type_set = PETSC_TRUE;
858   PetscFunctionReturn(0);
859 }
860 
861 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
862 {
863   PC_ASM *osm = (PC_ASM*)pc->data;
864 
865   PetscFunctionBegin;
866   *type = osm->type;
867   PetscFunctionReturn(0);
868 }
869 
870 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
871 {
872   PC_ASM *osm = (PC_ASM *) pc->data;
873 
874   PetscFunctionBegin;
875   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
876   osm->loctype = type;
877   PetscFunctionReturn(0);
878 }
879 
880 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
881 {
882   PC_ASM *osm = (PC_ASM *) pc->data;
883 
884   PetscFunctionBegin;
885   *type = osm->loctype;
886   PetscFunctionReturn(0);
887 }
888 
889 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
890 {
891   PC_ASM *osm = (PC_ASM*)pc->data;
892 
893   PetscFunctionBegin;
894   osm->sort_indices = doSort;
895   PetscFunctionReturn(0);
896 }
897 
/* Implementation of PCASMGetSubKSP() for PCASM. Collective when first_local is
   requested (MPI_Scan over the PC communicator). */
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  /* n_local_true is only >= 1 once PCSetUp() has created the subdomain solvers */
  PetscCheck(osm->n_local_true >= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* exclusive prefix sum: inclusive MPI_Scan minus this rank's own contribution
       yields the global index of the first block owned by this process */
    PetscCallMPI(MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc)));
    *first_local -= osm->n_local_true;
  }
  /* the returned array is owned by the PC; callers must not free it */
  if (ksp) *ksp   = osm->ksp;
  PetscFunctionReturn(0);
}
913 
914 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
915 {
916   PC_ASM         *osm = (PC_ASM*)pc->data;
917 
918   PetscFunctionBegin;
919   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
920   PetscValidPointer(sub_mat_type,2);
921   *sub_mat_type = osm->sub_mat_type;
922   PetscFunctionReturn(0);
923 }
924 
925 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
926 {
927   PC_ASM            *osm = (PC_ASM*)pc->data;
928 
929   PetscFunctionBegin;
930   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
931   PetscCall(PetscFree(osm->sub_mat_type));
932   PetscCall(PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type));
933   PetscFunctionReturn(0);
934 }
935 
936 /*@C
937     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
938 
939     Collective on pc
940 
941     Input Parameters:
942 +   pc - the preconditioner context
943 .   n - the number of subdomains for this processor (default value = 1)
944 .   is - the index set that defines the subdomains for this processor
945          (or NULL for PETSc to determine subdomains)
946 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
947          (or NULL to not provide these)
948 
949     Options Database Key:
950     To set the total number of subdomain blocks rather than specify the
951     index sets, use the option
952 .    -pc_asm_local_blocks <blks> - Sets local blocks
953 
954     Notes:
955     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
956 
957     By default the ASM preconditioner uses 1 block per processor.
958 
959     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
960 
961     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
962     back to form the global solution (this is the standard restricted additive Schwarz method)
963 
964     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
965     no code to handle that case.
966 
967     Level: advanced
968 
969 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
970           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`
971 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscTryMethod() dispatches to the implementation composed by the PC type;
     it is a no-op when pc is not a PCASM */
  PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
  PetscFunctionReturn(0);
}
979 
980 /*@C
981     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
982     additive Schwarz preconditioner.  Either all or no processors in the
983     PC communicator must call this routine, with the same index sets.
984 
985     Collective on pc
986 
987     Input Parameters:
988 +   pc - the preconditioner context
989 .   N  - the number of subdomains for all processors
990 .   is - the index sets that define the subdomains for all processors
991          (or NULL to ask PETSc to determine the subdomains)
992 -   is_local - the index sets that define the local part of the subdomains for this processor
993          (or NULL to not provide this information)
994 
995     Options Database Key:
996     To set the total number of subdomain blocks rather than specify the
997     index sets, use the option
998 .    -pc_asm_blocks <blks> - Sets total blocks
999 
1000     Notes:
1001     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
1002 
1003     By default the ASM preconditioner uses 1 block per processor.
1004 
1005     These index sets cannot be destroyed until after completion of the
1006     linear solves for which the ASM preconditioner is being used.
1007 
1008     Use PCASMSetLocalSubdomains() to set local subdomains.
1009 
1010     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
1011 
1012     Level: advanced
1013 
1014 .seealso: `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1015           `PCASMCreateSubdomains2D()`
1016 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscTryMethod() is a no-op when pc is not a PCASM */
  PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
  PetscFunctionReturn(0);
}
1024 
1025 /*@
1026     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1027     additive Schwarz preconditioner.  Either all or no processors in the
1028     PC communicator must call this routine.
1029 
1030     Logically Collective on pc
1031 
1032     Input Parameters:
1033 +   pc  - the preconditioner context
1034 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1035 
1036     Options Database Key:
1037 .   -pc_asm_overlap <ovl> - Sets overlap
1038 
1039     Notes:
1040     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
1042     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1043 
1044     The overlap defaults to 1, so if one desires that no additional
1045     overlap be computed beyond what may have been set with a call to
1046     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1047     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
1049     internally by PETSc, and using an overlap of 0 would result in an ASM
1050     variant that is equivalent to the block Jacobi preconditioner.
1051 
1052     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1053     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1054 
1055     Note that one can define initial index sets with any overlap via
1056     PCASMSetLocalSubdomains(); the routine
1057     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1058     if desired.
1059 
1060     Level: intermediate
1061 
1062 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1063           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`
1064 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* ovl must be the same value on every rank of the PC communicator */
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* PetscTryMethod() is a no-op when pc is not a PCASM */
  PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
  PetscFunctionReturn(0);
}
1073 
1074 /*@
1075     PCASMSetType - Sets the type of restriction and interpolation used
1076     for local problems in the additive Schwarz method.
1077 
1078     Logically Collective on pc
1079 
1080     Input Parameters:
1081 +   pc  - the preconditioner context
1082 -   type - variant of ASM, one of
1083 .vb
1084       PC_ASM_BASIC       - full interpolation and restriction
1085       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1086       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1087       PC_ASM_NONE        - local processor restriction and interpolation
1088 .ve
1089 
1090     Options Database Key:
1091 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1092 
1093     Notes:
1094     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1095     to limit the local processor interpolation
1096 
1097     Level: intermediate
1098 
1099 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1100           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1101 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* type must be the same value on every rank of the PC communicator */
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* PetscTryMethod() is a no-op when pc is not a PCASM */
  PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
  PetscFunctionReturn(0);
}
1110 
1111 /*@
1112     PCASMGetType - Gets the type of restriction and interpolation used
1113     for local problems in the additive Schwarz method.
1114 
1115     Logically Collective on pc
1116 
1117     Input Parameter:
1118 .   pc  - the preconditioner context
1119 
1120     Output Parameter:
1121 .   type - variant of ASM, one of
1122 
1123 .vb
1124       PC_ASM_BASIC       - full interpolation and restriction
1125       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1126       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1127       PC_ASM_NONE        - local processor restriction and interpolation
1128 .ve
1129 
1130     Options Database Key:
1131 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1132 
1133     Level: intermediate
1134 
1135 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1136           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1137 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() errors if pc does not compose this method (i.e. is not a PCASM) */
  PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
  PetscFunctionReturn(0);
}
1145 
1146 /*@
1147   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1148 
1149   Logically Collective on pc
1150 
1151   Input Parameters:
1152 + pc  - the preconditioner context
1153 - type - type of composition, one of
1154 .vb
1155   PC_COMPOSITE_ADDITIVE       - local additive combination
1156   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1157 .ve
1158 
1159   Options Database Key:
1160 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1161 
1162   Level: intermediate
1163 
1164 .seealso: `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASM`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType`
1165 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* type must be the same value on every rank of the PC communicator */
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* PetscTryMethod() is a no-op when pc is not a PCASM */
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}
1174 
1175 /*@
1176   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1177 
1178   Logically Collective on pc
1179 
1180   Input Parameter:
1181 . pc  - the preconditioner context
1182 
1183   Output Parameter:
1184 . type - type of composition, one of
1185 .vb
1186   PC_COMPOSITE_ADDITIVE       - local additive combination
1187   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1188 .ve
1189 
1190   Options Database Key:
1191 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1192 
1193   Level: intermediate
1194 
1195 .seealso: `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCASMSetType()`, `PCASMGetType()`, `PCCompositeType`
1196 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod() errors if pc does not compose this method (i.e. is not a PCASM) */
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(0);
}
1205 
1206 /*@
1207     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1208 
1209     Logically Collective on pc
1210 
1211     Input Parameters:
1212 +   pc  - the preconditioner context
1213 -   doSort - sort the subdomain indices
1214 
1215     Level: intermediate
1216 
1217 .seealso: `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1218           `PCASMCreateSubdomains2D()`
1219 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* doSort must be the same value on every rank of the PC communicator */
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* PetscTryMethod() is a no-op when pc is not a PCASM */
  PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
  PetscFunctionReturn(0);
}
1228 
1229 /*@C
1230    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1231    this processor.
1232 
1233    Collective on pc iff first_local is requested
1234 
1235    Input Parameter:
1236 .  pc - the preconditioner context
1237 
1238    Output Parameters:
1239 +  n_local - the number of blocks on this processor or NULL
1240 .  first_local - the global number of the first block on this processor or NULL,
1241                  all processors must request or all must pass NULL
1242 -  ksp - the array of KSP contexts
1243 
1244    Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.
1246 
1247    You must call KSPSetUp() before calling PCASMGetSubKSP().
1248 
1249    Fortran note:
1250    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1251 
1252    Level: advanced
1253 
1254 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1255           `PCASMCreateSubdomains2D()`,
1256 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() errors if pc does not compose this method (i.e. is not a PCASM).
     Collective when first_local is non-NULL (the implementation performs an MPI_Scan). */
  PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
  PetscFunctionReturn(0);
}
1264 
1265 /* -------------------------------------------------------------------------------------*/
1266 /*MC
1267    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1268            its own KSP object.
1269 
1270    Options Database Keys:
1271 +  -pc_asm_blocks <blks> - Sets total blocks
1272 .  -pc_asm_overlap <ovl> - Sets overlap
1273 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1274 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1275 
1276      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1277       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1278       -pc_asm_type basic to use the standard ASM.
1279 
1280    Notes:
1281     Each processor can have one or more blocks, but a block cannot be shared by more
1282      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1283 
1284      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1285         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1286 
1287      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1288          and set the options directly on the resulting KSP object (you can access its PC
1289          with KSPGetPC())
1290 
1291    Level: beginner
1292 
1293     References:
1294 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1295      Courant Institute, New York University Technical report
1296 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1297     Cambridge University Press.
1298 
1299 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`,
1300           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1301           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType`
1302 
1303 M*/
1304 
/*
   PCCreate_ASM - Creates the PC_ASM data structure with its default settings
   (one block per process is decided later, overlap 1, restricted ASM, additive
   local composition), installs the PC method table, and composes the
   PCASMSetXXX()/PCASMGetXXX() implementations onto the object.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM         *osm;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc,&osm));

  /* Default configuration; PETSC_DECIDE values are resolved during PCSetUp_ASM() */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = NULL;
  osm->restriction       = NULL;
  osm->lprolongation     = NULL;
  osm->lrestriction      = NULL;
  osm->x                 = NULL;
  osm->y                 = NULL;
  osm->is                = NULL;
  osm->is_local          = NULL;
  osm->mat               = NULL;
  osm->pmat              = NULL;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  /* Install the method table for this PC implementation */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* Compose the type-specific set/get methods so the PCASMXXX() wrappers can dispatch here */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM));
  PetscFunctionReturn(0);
}
1357 
1358 /*@C
1359    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1360    preconditioner for any problem on a general grid.
1361 
1362    Collective
1363 
1364    Input Parameters:
1365 +  A - The global matrix operator
1366 -  n - the number of local blocks
1367 
1368    Output Parameters:
1369 .  outis - the array of index sets defining the subdomains
1370 
1371    Level: advanced
1372 
1373    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1374     from these if you use PCASMSetLocalSubdomains()
1375 
1376     In the Fortran version you must provide the array outis[] already allocated of length n.
1377 
1378 .seealso: `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1379 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  PetscCheck(n >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %" PetscInt_FMT,n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A,&prefix));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(MatGetBlockSize(A,&bs));
  /* the locally owned row range must be aligned with the matrix block size */
  PetscCheck(rstart/bs*bs == rstart && rend/bs*bs == rend,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT,rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop));
  if (hasop) {
    PetscCall(MatGetDiagonalBlock(A,&Ad));
  }
  if (Ad) {
    /* detect (symmetric) block-AIJ storage; then the adjacency is built over block rows */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF,&mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* MATPARTITIONINGCURRENT and MATPARTITIONINGSQUARE are trivial partitioners;
       only run the graph partitioning below when a real partitioner was selected */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match));
    if (!match) {
      PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match));
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      PetscCall(MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=NULL,*jja=NULL; /* note: i,j intentionally shadow the outer variables */
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na+1,&iia));
        PetscCall(PetscMalloc1(nnz,&jja));
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; MatCreateMPIAdj() takes ownership
           of iia and jja, so they must not be freed here (freed with adj) */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj));
        PetscCall(MatPartitioningSetAdjacency(mpart,adj));
        PetscCall(MatPartitioningSetNParts(mpart,n));
        PetscCall(MatPartitioningApply(mpart,&ispart));
        PetscCall(ISPartitioningToNumbering(ispart,&isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* the IS array is returned to the caller; free it with PCASMDestroySubdomains() */
  PetscCall(PetscMalloc1(n,&is));
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* spread the remainder (mbs % n) one extra block row to each of the first subdomains */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]));
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n,&count));
    PetscCall(ISPartitioningCount(ispart,n,count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb,&nidx));
    PetscCall(PetscMalloc1(nidx,&indices));
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb,&numbering));
    /* after the sort, indices[] lists the local rows grouped by partition number */
    PetscCall(PetscSortIntWithPermutation(nidx,numbering,indices));
    PetscCall(ISRestoreIndices(isnumb,&numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block-row index into its bs scalar row indices */
      PetscCall(PetscMalloc1(nidx*bs,&newidx));
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));

  }
  PetscFunctionReturn(0);
}
1532 
1533 /*@C
1534    PCASMDestroySubdomains - Destroys the index sets created with
1535    PCASMCreateSubdomains(). Should be called after setting subdomains
1536    with PCASMSetLocalSubdomains().
1537 
1538    Collective
1539 
1540    Input Parameters:
1541 +  n - the number of index sets
1542 .  is - the array of index sets
1543 -  is_local - the array of local index sets, can be NULL
1544 
1545    Level: advanced
1546 
1547 .seealso: `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1548 @*/
1549 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1550 {
1551   PetscInt       i;
1552 
1553   PetscFunctionBegin;
1554   if (n <= 0) PetscFunctionReturn(0);
1555   if (is) {
1556     PetscValidPointer(is,2);
1557     for (i=0; i<n; i++) PetscCall(ISDestroy(&is[i]));
1558     PetscCall(PetscFree(is));
1559   }
1560   if (is_local) {
1561     PetscValidPointer(is_local,3);
1562     for (i=0; i<n; i++) PetscCall(ISDestroy(&is_local[i]));
1563     PetscCall(PetscFree(is_local));
1564   }
1565   PetscFunctionReturn(0);
1566 }
1567 
1568 /*@
1569    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1570    preconditioner for a two-dimensional problem on a regular grid.
1571 
1572    Not Collective
1573 
1574    Input Parameters:
1575 +  m   - the number of mesh points in the x direction
1576 .  n   - the number of mesh points in the y direction
1577 .  M   - the number of subdomains in the x direction
1578 .  N   - the number of subdomains in the y direction
1579 .  dof - degrees of freedom per node
1580 -  overlap - overlap in mesh lines
1581 
1582    Output Parameters:
1583 +  Nsub - the number of subdomains created
1584 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1585 -  is_local - array of index sets defining non-overlapping subdomains
1586 
   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1591 
1592    Level: advanced
1593 
1594 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1595           `PCASMSetOverlap()`
1596 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  PetscCheck(dof == 1,PETSC_COMM_SELF,PETSC_ERR_SUP,"dof must be 1");

  /* one subdomain per cell of the M x N subdomain grid */
  *Nsub     = N*M;
  PetscCall(PetscMalloc1(*Nsub,is));
  PetscCall(PetscMalloc1(*Nsub,is_local));
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend the subdomain by 'overlap' mesh lines in y, clipped to [0,n) */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* extend the subdomain by 'overlap' mesh lines in x, clipped to [0,m) */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      PetscCall(PetscMalloc1(nidx,&idx));
      loc    = 0;
      /* collect the natural (row-major) grid indices of the overlapping rectangle */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer));
      if (overlap == 0) {
        /* no overlap: the local subdomain equals the overlapping one, so share the IS
           (extra reference taken because both arrays will be destroyed independently) */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx for the non-overlapping rectangle; it has at most nidx entries */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* sort each overlapping IS (entries are generated in ascending order, so this is a safeguard) */
  for (i=0; i<*Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(0);
}
1650 
1651 /*@C
1652     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1653     only) for the additive Schwarz preconditioner.
1654 
1655     Not Collective
1656 
1657     Input Parameter:
1658 .   pc - the preconditioner context
1659 
1660     Output Parameters:
1661 +   n - if requested, the number of subdomains for this processor (default value = 1)
1662 .   is - if requested, the index sets that define the subdomains for this processor
1663 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1664 
1665     Notes:
1666     The IS numbering is in the parallel, global numbering of the vector.
1667 
1668     Level: advanced
1669 
1670 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1671           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1672 @*/
1673 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1674 {
1675   PC_ASM         *osm = (PC_ASM*)pc->data;
1676   PetscBool      match;
1677 
1678   PetscFunctionBegin;
1679   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1680   if (n) PetscValidIntPointer(n,2);
1681   if (is) PetscValidPointer(is,3);
1682   if (is_local) PetscValidPointer(is_local,4);
1683   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1684   PetscCheck(match,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1685   if (n) *n = osm->n_local_true;
1686   if (is) *is = osm->is;
1687   if (is_local) *is_local = osm->is_local;
1688   PetscFunctionReturn(0);
1689 }
1690 
1691 /*@C
1692     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1693     only) for the additive Schwarz preconditioner.
1694 
1695     Not Collective
1696 
1697     Input Parameter:
1698 .   pc - the preconditioner context
1699 
1700     Output Parameters:
1701 +   n - if requested, the number of matrices for this processor (default value = 1)
1702 -   mat - if requested, the matrices
1703 
1704     Level: advanced
1705 
1706     Notes:
1707     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks())
1708 
1709            Usually one would use PCSetModifySubMatrices() to change the submatrices in building the preconditioner.
1710 
1711 .seealso: `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1712           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1713 @*/
1714 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1715 {
1716   PC_ASM         *osm;
1717   PetscBool      match;
1718 
1719   PetscFunctionBegin;
1720   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1721   if (n) PetscValidIntPointer(n,2);
1722   if (mat) PetscValidPointer(mat,3);
1723   PetscCheck(pc->setupcalled,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1724   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1725   if (!match) {
1726     if (n) *n = 0;
1727     if (mat) *mat = NULL;
1728   } else {
1729     osm = (PC_ASM*)pc->data;
1730     if (n) *n = osm->n_local_true;
1731     if (mat) *mat = osm->pmat;
1732   }
1733   PetscFunctionReturn(0);
1734 }
1735 
1736 /*@
1737     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1738 
1739     Logically Collective
1740 
1741     Input Parameters:
1742 +   pc  - the preconditioner
1743 -   flg - boolean indicating whether to use subdomains defined by the DM
1744 
1745     Options Database Key:
1746 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1747 
1748     Level: intermediate
1749 
1750     Notes:
1751     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1752     so setting either of the first two effectively turns the latter off.
1753 
1754 .seealso: `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1755           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1756 @*/
1757 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1758 {
1759   PC_ASM         *osm = (PC_ASM*)pc->data;
1760   PetscBool      match;
1761 
1762   PetscFunctionBegin;
1763   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1764   PetscValidLogicalCollectiveBool(pc,flg,2);
1765   PetscCheck(!pc->setupcalled,((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1766   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1767   if (match) {
1768     osm->dm_subdomains = flg;
1769   }
1770   PetscFunctionReturn(0);
1771 }
1772 
1773 /*@
    PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Not Collective
1776 
1777     Input Parameter:
1778 .   pc  - the preconditioner
1779 
1780     Output Parameter:
1781 .   flg - boolean indicating whether to use subdomains defined by the DM
1782 
1783     Level: intermediate
1784 
1785 .seealso: `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`
1786           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1787 @*/
1788 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1789 {
1790   PC_ASM         *osm = (PC_ASM*)pc->data;
1791   PetscBool      match;
1792 
1793   PetscFunctionBegin;
1794   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1795   PetscValidBoolPointer(flg,2);
1796   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1797   if (match) *flg = osm->dm_subdomains;
1798   else *flg = PETSC_FALSE;
1799   PetscFunctionReturn(0);
1800 }
1801 
1802 /*@
1803      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1804 
1805    Not Collective
1806 
1807    Input Parameter:
1808 .  pc - the PC
1809 
   Output Parameter:
.  sub_mat_type - name of matrix type
1812 
1813    Level: advanced
1814 
1815 .seealso: `PCASMSetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1816 @*/
1817 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1818 {
1819   PetscFunctionBegin;
1820   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1821   PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));
1822   PetscFunctionReturn(0);
1823 }
1824 
1825 /*@
1826      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1827 
   Collective
1829 
1830    Input Parameters:
1831 +  pc             - the PC object
1832 -  sub_mat_type   - matrix type
1833 
1834    Options Database Key:
1835 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1836 
1837    Notes:
1838    See "${PETSC_DIR}/include/petscmat.h" for available types
1839 
1840   Level: advanced
1841 
1842 .seealso: `PCASMGetSubMatType()`, `PCASM`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1843 @*/
PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the type-specific implementation if it exists; a no-op otherwise */
  PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));
  PetscFunctionReturn(0);
}
1851