xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision dfd676b1a855b7f967ece75a22ee7f6626d10f89)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 
13 #include <petsc/private/pcasmimpl.h> /*I "petscpc.h" I*/
14 
/*
  PCView_ASM - Views the additive Schwarz preconditioner.

  Supports two viewer types:
    - ASCII:  prints the subdomain block count, overlap amount, restriction/
              interpolation type, and local-solve composition type. With the
              PETSC_VIEWER_ASCII_INFO_DETAIL format every rank prints each of
              its local blocks' KSP; otherwise only rank 0's first-block KSP
              is shown.
    - string: prints a one-line summary plus the first block's KSP.

  Collective on pc: the viewer push/pop, sub-viewer get/restore, and flush
  calls below must be executed on every rank in the same order, even for
  ranks that print nothing.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM            *osm = (PC_ASM*)pc->data;
  PetscMPIInt       rank;
  PetscInt          i,bsz;
  PetscBool         iascii,isstring;
  PetscViewer       sviewer;
  PetscViewerFormat format;
  const char        *prefix;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
  if (iascii) {
    /* Default strings are used when overlap/block counts have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %" PetscInt_FMT,osm->overlap));
    if (osm->n > 0) PetscCall(PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %" PetscInt_FMT,osm->n));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]));
    if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n"));
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]));
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
    PetscCall(PetscViewerGetFormat(viewer,&format));
    if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (osm->ksp) {
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(PetscViewerASCIIPrintf(viewer,"  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n",prefix?prefix:""));
        /* GetSubViewer/RestoreSubViewer are collective; only rank 0 views through it */
        PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
        if (rank == 0) {
          PetscCall(PetscViewerASCIIPushTab(viewer));
          PetscCall(KSPView(osm->ksp[0],sviewer));
          PetscCall(PetscViewerASCIIPopTab(viewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      }
    } else {
      /* Detailed view: every rank reports each of its true local blocks */
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %" PetscInt_FMT "\n",(int)rank,osm->n_local_true));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"  Local solver information for each block is in the following KSP and PC objects:\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n"));
      PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISGetLocalSize(osm->is[i],&bsz));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n",(int)rank,i,bsz));
        PetscCall(KSPView(osm->ksp[i],sviewer));
        PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n"));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer," blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]));
    PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
    if (osm->ksp) PetscCall(KSPView(osm->ksp[0],sviewer));
    PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  }
  PetscFunctionReturn(0);
}
78 
79 static PetscErrorCode PCASMPrintSubdomains(PC pc)
80 {
81   PC_ASM         *osm = (PC_ASM*)pc->data;
82   const char     *prefix;
83   char           fname[PETSC_MAX_PATH_LEN+1];
84   PetscViewer    viewer, sviewer;
85   char           *s;
86   PetscInt       i,j,nidx;
87   const PetscInt *idx;
88   PetscMPIInt    rank, size;
89 
90   PetscFunctionBegin;
91   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
92   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
93   PetscCall(PCGetOptionsPrefix(pc,&prefix));
94   PetscCall(PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,sizeof(fname),NULL));
95   if (fname[0] == 0) PetscCall(PetscStrcpy(fname,"stdout"));
96   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer));
97   for (i=0; i<osm->n_local; i++) {
98     if (i < osm->n_local_true) {
99       PetscCall(ISGetLocalSize(osm->is[i],&nidx));
100       PetscCall(ISGetIndices(osm->is[i],&idx));
101       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
102 #define len  16*(nidx+1)+512
103       PetscCall(PetscMalloc1(len,&s));
104       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105 #undef len
106       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107       for (j=0; j<nidx; j++) {
108         PetscCall(PetscViewerStringSPrintf(sviewer,"%" PetscInt_FMT " ",idx[j]));
109       }
110       PetscCall(ISRestoreIndices(osm->is[i],&idx));
111       PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
112       PetscCall(PetscViewerDestroy(&sviewer));
113       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
114       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
115       PetscCall(PetscViewerFlush(viewer));
116       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
117       PetscCall(PetscFree(s));
118       if (osm->is_local) {
119         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
120 #define len  16*(nidx+1)+512
121         PetscCall(PetscMalloc1(len, &s));
122         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
123 #undef len
124         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
125         PetscCall(ISGetLocalSize(osm->is_local[i],&nidx));
126         PetscCall(ISGetIndices(osm->is_local[i],&idx));
127         for (j=0; j<nidx; j++) {
128           PetscCall(PetscViewerStringSPrintf(sviewer,"%" PetscInt_FMT " ",idx[j]));
129         }
130         PetscCall(ISRestoreIndices(osm->is_local[i],&idx));
131         PetscCall(PetscViewerStringSPrintf(sviewer,"\n"));
132         PetscCall(PetscViewerDestroy(&sviewer));
133         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
134         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
135         PetscCall(PetscViewerFlush(viewer));
136         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
137         PetscCall(PetscFree(s));
138       }
139     } else {
140       /* Participate in collective viewer calls. */
141       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
142       PetscCall(PetscViewerFlush(viewer));
143       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
144       /* Assume either all ranks have is_local or none do. */
145       if (osm->is_local) {
146         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
147         PetscCall(PetscViewerFlush(viewer));
148         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
149       }
150     }
151   }
152   PetscCall(PetscViewerFlush(viewer));
153   PetscCall(PetscViewerDestroy(&viewer));
154   PetscFunctionReturn(0);
155 }
156 
/*
  PCSetUp_ASM - Builds the data structures needed to apply the additive Schwarz
  preconditioner: the subdomain index sets, the extracted submatrices, the
  per-subdomain KSP solvers, and the scatters connecting the vector spaces.

  Vector spaces involved:
    global x/y      - vectors of the outer problem
    osm->lx/ly      - concatenated "local" vectors over the sorted union of this
                      rank's (overlapping) subdomains, indexed by osm->lis
    osm->x[i]/y[i]  - per-block work vectors sized to subdomain i

  Scatters created on first setup:
    osm->restriction      : global -> lx  (entries of osm->lis)
    osm->lrestriction[i]  : lx     -> x[i] (the overlapping is[i] entries)
    osm->lprolongation[i] : y[i]   -> ly  (only the non-overlapping is_local[i]
                            entries; built only for restricted additive Schwarz
                            when is_local is available)

  On repeated setup (pc->setupcalled true) only the submatrices are refreshed,
  reusing them in place unless the nonzero pattern or submatrix type changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscBool      flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m; /* NOTE(review): shadows the outer m; the size computed at the
                   ISGetLocalSize() below is recomputed later in the second
                   !pc->setupcalled block, so this inner value is unused beyond
                   this scope — confirm before removing the shadow */

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
        }
        /* PCASMSetLocalSubdomains() takes references, so release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    PetscCall(PetscFree(domain_names[d]));
          if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
          if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
        }
        PetscCall(PetscFree(domain_names));
        PetscCall(PetscFree(inner_domain_is));
        PetscCall(PetscFree(outer_domain_is));
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      /* single reduction computing both max (-> n_local) and sum (-> n) */
      PetscCall(MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc)));
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        PetscCall(MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE));
      }
    }
    if (!osm->is) { /* create the index sets */
      PetscCall(PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is));
    }
    /* Record the non-overlapping subdomains before the overlap is grown below */
    if (osm->n_local_true > 1 && !osm->is_local) {
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->is_local));
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          PetscCall(ISDuplicate(osm->is[i],&osm->is_local[i]));
          PetscCall(ISCopy(osm->is[i],osm->is_local[i]));
        } else {
          /* zero overlap: the two index sets are identical, just share them */
          PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    PetscCall(PCGetOptionsPrefix(pc,&prefix));
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      PetscCall(MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap));
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(ISSort(osm->is[i]));
        if (osm->is_local) {
          PetscCall(ISSort(osm->is_local[i]));
        }
      }
    }
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsHasName(NULL,prefix,"-pc_asm_print_subdomains",&flg));
    if (flg) PetscCall(PCASMPrintSubdomains(pc));
    if (!osm->ksp) {
      /* Create the local solvers */
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->ksp));
      if (domain_dm) {
        PetscCall(PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n"));
      }
      for (i=0; i<osm->n_local_true; i++) {
        PetscCall(KSPCreate(PETSC_COMM_SELF,&ksp));
        PetscCall(KSPSetErrorIfNotConverged(ksp,pc->erroriffailure));
        PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp));
        PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1));
        /* default block solver is "preonly" (apply the sub-PC once); options
           with the "<prefix>sub_" prefix can override it */
        PetscCall(KSPSetType(ksp,KSPPREONLY));
        PetscCall(KSPGetPC(ksp,&subpc));
        PetscCall(PCGetOptionsPrefix(pc,&prefix));
        PetscCall(KSPSetOptionsPrefix(ksp,prefix));
        PetscCall(KSPAppendOptionsPrefix(ksp,"sub_"));
        if (domain_dm) {
          PetscCall(KSPSetDM(ksp, domain_dm[i]));
          PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
          PetscCall(DMDestroy(&domain_dm[i]));
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        PetscCall(PetscFree(domain_dm));
      }
    }

    /* lis = sorted union of all of this rank's overlapping subdomains */
    PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
    PetscCall(ISSortRemoveDups(osm->lis));
    PetscCall(ISGetLocalSize(osm->lis, &m));

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      PetscCall(MatDestroyMatrices(osm->n_local_true,&osm->pmat));
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
  if ((scall == MAT_REUSE_MATRIX) && osm->sub_mat_type) {
    if (osm->n_local_true > 0) {
      PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
    }
    scall = MAT_INITIAL_MATRIX;
  }

  /*
     Extract out the submatrices
  */
  PetscCall(MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat));
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix));
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]));
      PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix));
    }
  }

  /* Convert the types of the submatrices (if need be) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i])));
    }
  }

  if (!pc->setupcalled) {
    VecType vtype;

    /* Create the local work vectors (from the local matrices) and scatter contexts */
    PetscCall(MatCreateVecs(pc->pmat,&vec,NULL));

    PetscCheck(!osm->is_local || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE),PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    /* lprolongation only exists for restricted additive Schwarz with is_local */
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      PetscCall(PetscMalloc1(osm->n_local_true,&osm->lprolongation));
    }
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->lrestriction));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->x));
    PetscCall(PetscMalloc1(osm->n_local_true,&osm->y));

    /* lx/ly are sequential vectors over the union lis; the restriction scatter
       pulls the lis entries out of a global vector into positions 0..m-1 */
    PetscCall(ISGetLocalSize(osm->lis,&m));
    PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
    PetscCall(MatGetVecType(osm->pmat[0],&vtype));
    PetscCall(VecCreate(PETSC_COMM_SELF,&osm->lx));
    PetscCall(VecSetSizes(osm->lx,m,m));
    PetscCall(VecSetType(osm->lx,vtype));
    PetscCall(VecDuplicate(osm->lx, &osm->ly));
    PetscCall(VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction));
    PetscCall(ISDestroy(&isl));

    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(MatCreateVecs(osm->pmat[i],&osm->x[i],NULL));
      PetscCall(VecDuplicate(osm->x[i],&osm->y[i]));

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
      PetscCall(ISGetLocalSize(osm->is[i],&m));
      PetscCall(ISGetIndices(osm->is[i], &idx_is));
      PetscCall(PetscMalloc1(m,&idx_lis));
      /* translate the global indices of is[i] into positions within lis */
      PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis));
      PetscCheck(nout == m,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll));
      PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
      PetscCall(ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl));
      PetscCall(VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]));
      PetscCall(ISDestroy(&isll));
      PetscCall(ISDestroy(&isl));
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        PetscCall(ISGetLocalSize(osm->is_local[i],&m_local));
        PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

        /* idx1: positions of is_local[i] within the overlapping block is[i] */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog));
        PetscCall(PetscMalloc1(m_local,&idx1));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll));

        /* idx2: positions of is_local[i] within the union lis */
        PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis,&ltog));
        PetscCall(PetscMalloc1(m_local,&idx2));
        PetscCall(ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2));
        PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
        PetscCheck(nout == m_local,PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local));

        PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
        PetscCall(VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]));

        PetscCall(ISDestroy(&isll));
        PetscCall(ISDestroy(&isll_local));
      }
    }
    PetscCall(VecDestroy(&vec));
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] holds the rows is[i] by columns lis, used to update later
       blocks' RHS from earlier blocks' solutions in PCApply_ASM() */
    PetscCall(PetscMalloc1(osm->n_local_true, &cis));
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
    PetscCall(PetscFree(cis));
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  PetscCall(PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP));

  /*
     Loop over subdomains putting them into local ksp
  */
  PetscCall(KSPGetOptionsPrefix(osm->ksp[0],&prefix));
  for (i=0; i<osm->n_local_true; i++) {
    PetscCall(KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]));
    PetscCall(MatSetOptionsPrefix(osm->pmat[i],prefix));
    if (!pc->setupcalled) {
      PetscCall(KSPSetFromOptions(osm->ksp[i]));
    }
  }
  PetscFunctionReturn(0);
}
429 
430 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
431 {
432   PC_ASM             *osm = (PC_ASM*)pc->data;
433   PetscInt           i;
434   KSPConvergedReason reason;
435 
436   PetscFunctionBegin;
437   for (i=0; i<osm->n_local_true; i++) {
438     PetscCall(KSPSetUp(osm->ksp[i]));
439     PetscCall(KSPGetConvergedReason(osm->ksp[i],&reason));
440     if (reason == KSP_DIVERGED_PC_FAILED) {
441       pc->failedreason = PC_SUBPC_ERROR;
442     }
443   }
444   PetscFunctionReturn(0);
445 }
446 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: each overlapping
  block RHS is extracted from x, solved with its own KSP, and the block
  solutions are summed back into y.

  osm->type controls whether the restriction (global -> lx) and/or the
  interpolation (ly -> global) scatter includes off-process ghost values:
  basic uses both, restrict/interpolate drop one side by using the *_LOCAL
  scatter modes below.

  For PC_COMPOSITE_MULTIPLICATIVE the blocks are processed in sequence and
  each later block's RHS is corrected by the already-accumulated local
  solution (Gauss-Seidel-like among the local blocks).
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(VecSet(y, 0.0));
    PetscCall(VecSet(osm->ly, 0.0));

    /* copy the global RHS to local RHS including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

    /* restrict local RHS to the overlapping 0-block RHS */
    PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
    PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i],0));
      PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
      PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
      PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping i-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      }

      if (i < n_local_true-1) {
        /* restrict local RHS to the overlapping (i+1)-block RHS */
        PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));

        if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly */
          PetscCall(MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]));
          PetscCall(VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]));
        }
      }
    }
    /* add the local solution to the global solution including the ghost nodes */
    PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
    PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
515 
/*
  PCMatApply_ASM - Applies the additive Schwarz preconditioner to multiple
  right-hand sides at once, stored as the columns of the dense matrix X, with
  the results placed in the columns of Y.

  Only the single-local-block case (n_local_true <= 1) is implemented: all
  columns are restricted into a dense work matrix Z, solved in one
  KSPMatSolve() on block 0, then prolongated column-by-column back into Y.
  The scatter-mode handling mirrors PCApply_ASM().
*/
static PetscErrorCode PCMatApply_ASM(PC pc,Mat X,Mat Y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  Mat            Z,W;
  Vec            x;
  PetscInt       i,m,N;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  PetscCheck(osm->n_local_true <= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Not yet implemented");
  /*
     support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }
  /* Z: block-0-sized dense matrix holding every restricted RHS column */
  PetscCall(VecGetLocalSize(osm->x[0], &m));
  PetscCall(MatGetSize(X, NULL, &N));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
    /* zero the global and the local solutions */
    PetscCall(MatZeroEntries(Y));
    PetscCall(VecSet(osm->ly, 0.0));

    for (i = 0; i < N; ++i) {
      PetscCall(MatDenseGetColumnVecRead(X, i, &x));
      /* copy the global RHS to local RHS including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
      /* restrict local RHS to the overlapping 0-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
      PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
    }
    PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
    /* solve the overlapping 0-block for all columns at once */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
    PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
    PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W,0));
    PetscCall(MatDestroy(&Z));

    for (i = 0; i < N; ++i) {
      /* ly is reused per column, so it must be re-zeroed each iteration */
      PetscCall(VecSet(osm->ly, 0.0));
      PetscCall(MatDenseGetColumnVecRead(W, i, &x));
      if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
        PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
        PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
      } else { /* interpolate the overlapping 0-block solution to the local solution */
        PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
        PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
      }
      PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

      PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
      /* add the local solution to the global solution including the ghost nodes */
      PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
    }
    PetscCall(MatDestroy(&W));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  PetscFunctionReturn(0);
}
589 
/*
  PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
  preconditioner: the same restrict / block-solve / interpolate structure as
  PCApply_ASM(), but with KSPSolveTranspose() on each block and with the
  restrict/interpolate scatter-mode roles swapped (since the transpose of
  R_i^T A_i^{-1} R_i reverses which side is truncated to local values).

  Note there is no multiplicative variant here; the blocks are always combined
  additively.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    PetscCall(VecSet(osm->lx, 0.0));
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  PetscCall(VecSet(y, 0.0));
  PetscCall(VecSet(osm->ly, 0.0));

  /* Copy the global RHS to local RHS including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

  /* Restrict local RHS to the overlapping 0-block RHS */
  PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
  PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block with the transposed operator */
    PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));
    PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
    PetscCall(KSPCheckSolve(osm->ksp[i],pc,osm->y[i]));
    PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0));

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
    } else { /* interpolate the overlapping i-block solution to the local solution */
      PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
      PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      PetscCall(VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
      PetscCall(VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward));
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
  PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));

  PetscFunctionReturn(0);

}
654 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  /* Release all data built by PCSetUp_ASM() so the PC can be set up again; the
     KSP objects themselves survive (only reset) so user customizations persist. */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(KSPReset(osm->ksp[i]));
    }
  }
  /* Destroy the array of subdomain (overlapping block) matrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      PetscCall(MatDestroySubMatrices(osm->n_local_true,&osm->pmat));
    }
  }
  /* osm->lrestriction being non-NULL indicates the scatters/work vectors were created */
  if (osm->lrestriction) {
    PetscCall(VecScatterDestroy(&osm->restriction));
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
      /* lprolongation is only allocated for restricted-type interpolation with is_local */
      if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
      PetscCall(VecDestroy(&osm->x[i]));
      PetscCall(VecDestroy(&osm->y[i]));
    }
    PetscCall(PetscFree(osm->lrestriction));
    if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
    PetscCall(PetscFree(osm->x));
    PetscCall(PetscFree(osm->y));

  }
  /* Destroy the subdomain index sets and the local (merged) index set / work vectors */
  PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
  PetscCall(ISDestroy(&osm->lis));
  PetscCall(VecDestroy(&osm->lx));
  PetscCall(VecDestroy(&osm->ly));
  /* lmats are only created for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));
  }

  PetscCall(PetscFree(osm->sub_mat_type));

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}
699 
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  /* First release everything PCSetUp_ASM() created ... */
  PetscCall(PCReset_ASM(pc));
  /* ... then destroy the subdomain solvers, which PCReset_ASM() only resets */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      PetscCall(KSPDestroy(&osm->ksp[i]));
    }
    PetscCall(PetscFree(osm->ksp));
  }
  PetscCall(PetscFree(pc->data));

  /* Remove the composed method functions installed by PCCreate_ASM() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",NULL));
  PetscFunctionReturn(0);
}
728 
729 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
730 {
731   PC_ASM         *osm = (PC_ASM*)pc->data;
732   PetscInt       blocks,ovl;
733   PetscBool      flg;
734   PCASMType      asmtype;
735   PCCompositeType loctype;
736   char           sub_mat_type[256];
737 
738   PetscFunctionBegin;
739   PetscOptionsHeadBegin(PetscOptionsObject,"Additive Schwarz options");
740   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg));
741   PetscCall(PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg));
742   if (flg) {
743     PetscCall(PCASMSetTotalSubdomains(pc,blocks,NULL,NULL));
744     osm->dm_subdomains = PETSC_FALSE;
745   }
746   PetscCall(PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg));
747   if (flg) {
748     PetscCall(PCASMSetLocalSubdomains(pc,blocks,NULL,NULL));
749     osm->dm_subdomains = PETSC_FALSE;
750   }
751   PetscCall(PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg));
752   if (flg) {
753     PetscCall(PCASMSetOverlap(pc,ovl));
754     osm->dm_subdomains = PETSC_FALSE;
755   }
756   flg  = PETSC_FALSE;
757   PetscCall(PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg));
758   if (flg) PetscCall(PCASMSetType(pc,asmtype));
759   flg  = PETSC_FALSE;
760   PetscCall(PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg));
761   if (flg) PetscCall(PCASMSetLocalType(pc,loctype));
762   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg));
763   if (flg) {
764     PetscCall(PCASMSetSubMatType(pc,sub_mat_type));
765   }
766   PetscOptionsHeadEnd();
767   PetscFunctionReturn(0);
768 }
769 
770 /*------------------------------------------------------------------------------------*/
771 
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscInt       i;

  PetscFunctionBegin;
  PetscCheck(n >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %" PetscInt_FMT,n);
  PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is),PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Take references on the incoming index sets BEFORE destroying the stored ones,
       so the call is safe even if the caller passed (aliases of) osm->is/osm->is_local. */
    if (is) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
    }
    if (is_local) {
      for (i=0; i<n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
    }
    PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      PetscCall(PetscMalloc1(n,&osm->is));
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      PetscCall(PetscMalloc1(n,&osm->is_local));
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* Only nonoverlapping subdomains were given: derive osm->is from them */
      if (!is) {
        PetscCall(PetscMalloc1(osm->n_local_true,&osm->is));
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            PetscCall(ISDuplicate(osm->is_local[i],&osm->is[i]));
            PetscCall(ISCopy(osm->is_local[i],osm->is[i]));
          } else {
            /* No overlap will be added, so osm->is[i] can share the IS with osm->is_local[i] */
            PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
818 
819 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
820 {
821   PC_ASM         *osm = (PC_ASM*)pc->data;
822   PetscMPIInt    rank,size;
823   PetscInt       n;
824 
825   PetscFunctionBegin;
826   PetscCheck(N >= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %" PetscInt_FMT,N);
827   PetscCheck(!is && !is_local,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
828 
829   /*
830      Split the subdomains equally among all processors
831   */
832   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
833   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));
834   n    = N/size + ((N % size) > rank);
835   PetscCheck(n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT,(int)rank,(int)size,N);
836   PetscCheck(!pc->setupcalled || n == osm->n_local_true,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
837   if (!pc->setupcalled) {
838     PetscCall(PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local));
839 
840     osm->n_local_true = n;
841     osm->is           = NULL;
842     osm->is_local     = NULL;
843   }
844   PetscFunctionReturn(0);
845 }
846 
847 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
848 {
849   PC_ASM *osm = (PC_ASM*)pc->data;
850 
851   PetscFunctionBegin;
852   PetscCheck(ovl >= 0,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
853   PetscCheck(!pc->setupcalled || ovl == osm->overlap,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
854   if (!pc->setupcalled) osm->overlap = ovl;
855   PetscFunctionReturn(0);
856 }
857 
858 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
859 {
860   PC_ASM *osm = (PC_ASM*)pc->data;
861 
862   PetscFunctionBegin;
863   osm->type     = type;
864   osm->type_set = PETSC_TRUE;
865   PetscFunctionReturn(0);
866 }
867 
868 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
869 {
870   PC_ASM *osm = (PC_ASM*)pc->data;
871 
872   PetscFunctionBegin;
873   *type = osm->type;
874   PetscFunctionReturn(0);
875 }
876 
877 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
878 {
879   PC_ASM *osm = (PC_ASM *) pc->data;
880 
881   PetscFunctionBegin;
882   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
883   osm->loctype = type;
884   PetscFunctionReturn(0);
885 }
886 
887 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
888 {
889   PC_ASM *osm = (PC_ASM *) pc->data;
890 
891   PetscFunctionBegin;
892   *type = osm->loctype;
893   PetscFunctionReturn(0);
894 }
895 
896 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
897 {
898   PC_ASM *osm = (PC_ASM*)pc->data;
899 
900   PetscFunctionBegin;
901   osm->sort_indices = doSort;
902   PetscFunctionReturn(0);
903 }
904 
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  /* n_local_true is only meaningful (>= 1) after PCSetUp() has run */
  PetscCheck(osm->n_local_true >= 1,PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* MPI_Scan is collective: all ranks must request first_local or none may (see manpage above) */
    PetscCallMPI(MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc)));
    /* convert the inclusive scan into the global index of this rank's first block */
    *first_local -= osm->n_local_true;
  }
  /* returns the internal array; callers must not free it */
  if (ksp) *ksp   = osm->ksp;
  PetscFunctionReturn(0);
}
920 
921 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
922 {
923   PC_ASM         *osm = (PC_ASM*)pc->data;
924 
925   PetscFunctionBegin;
926   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
927   PetscValidPointer(sub_mat_type,2);
928   *sub_mat_type = osm->sub_mat_type;
929   PetscFunctionReturn(0);
930 }
931 
932 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
933 {
934   PC_ASM            *osm = (PC_ASM*)pc->data;
935 
936   PetscFunctionBegin;
937   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
938   PetscCall(PetscFree(osm->sub_mat_type));
939   PetscCall(PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type));
940   PetscFunctionReturn(0);
941 }
942 
943 /*@C
944     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
945 
946     Collective on pc
947 
948     Input Parameters:
949 +   pc - the preconditioner context
950 .   n - the number of subdomains for this processor (default value = 1)
951 .   is - the index set that defines the subdomains for this processor
952          (or NULL for PETSc to determine subdomains)
953 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
954          (or NULL to not provide these)
955 
956     Options Database Key:
957     To set the total number of subdomain blocks rather than specify the
958     index sets, use the option
959 .    -pc_asm_local_blocks <blks> - Sets local blocks
960 
961     Notes:
962     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
963 
964     By default the ASM preconditioner uses 1 block per processor.
965 
966     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
967 
968     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
969     back to form the global solution (this is the standard restricted additive Schwarz method)
970 
971     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
972     no code to handle that case.
973 
974     Level: advanced
975 
976 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
977           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
978 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMSetLocalSubdomains_ASM() when pc is of type PCASM; no-op otherwise */
  PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
  PetscFunctionReturn(0);
}
986 
987 /*@C
988     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
989     additive Schwarz preconditioner.  Either all or no processors in the
990     PC communicator must call this routine, with the same index sets.
991 
992     Collective on pc
993 
994     Input Parameters:
995 +   pc - the preconditioner context
996 .   N  - the number of subdomains for all processors
997 .   is - the index sets that define the subdomains for all processors
998          (or NULL to ask PETSc to determine the subdomains)
999 -   is_local - the index sets that define the local part of the subdomains for this processor
1000          (or NULL to not provide this information)
1001 
1002     Options Database Key:
1003     To set the total number of subdomain blocks rather than specify the
1004     index sets, use the option
1005 .    -pc_asm_blocks <blks> - Sets total blocks
1006 
1007     Notes:
1008     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
1009 
1010     By default the ASM preconditioner uses 1 block per processor.
1011 
1012     These index sets cannot be destroyed until after completion of the
1013     linear solves for which the ASM preconditioner is being used.
1014 
1015     Use PCASMSetLocalSubdomains() to set local subdomains.
1016 
1017     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
1018 
1019     Level: advanced
1020 
1021 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1022           PCASMCreateSubdomains2D()
1023 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMSetTotalSubdomains_ASM() when pc is of type PCASM; no-op otherwise */
  PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
  PetscFunctionReturn(0);
}
1031 
1032 /*@
1033     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1034     additive Schwarz preconditioner.  Either all or no processors in the
1035     PC communicator must call this routine.
1036 
1037     Logically Collective on pc
1038 
1039     Input Parameters:
1040 +   pc  - the preconditioner context
1041 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1042 
1043     Options Database Key:
1044 .   -pc_asm_overlap <ovl> - Sets overlap
1045 
1046     Notes:
1047     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
1049     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1050 
1051     The overlap defaults to 1, so if one desires that no additional
1052     overlap be computed beyond what may have been set with a call to
1053     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1054     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
1056     internally by PETSc, and using an overlap of 0 would result in an ASM
1057     variant that is equivalent to the block Jacobi preconditioner.
1058 
1059     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1060     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1061 
1062     Note that one can define initial index sets with any overlap via
1063     PCASMSetLocalSubdomains(); the routine
1064     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1065     if desired.
1066 
1067     Level: intermediate
1068 
1069 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1070           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1071 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatches to PCASMSetOverlap_ASM() when pc is of type PCASM; no-op otherwise */
  PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
  PetscFunctionReturn(0);
}
1080 
1081 /*@
1082     PCASMSetType - Sets the type of restriction and interpolation used
1083     for local problems in the additive Schwarz method.
1084 
1085     Logically Collective on pc
1086 
1087     Input Parameters:
1088 +   pc  - the preconditioner context
1089 -   type - variant of ASM, one of
1090 .vb
1091       PC_ASM_BASIC       - full interpolation and restriction
1092       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1093       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1094       PC_ASM_NONE        - local processor restriction and interpolation
1095 .ve
1096 
1097     Options Database Key:
1098 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1099 
1100     Notes:
1101     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1102     to limit the local processor interpolation
1103 
1104     Level: intermediate
1105 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1107           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1108 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatches to PCASMSetType_ASM() when pc is of type PCASM; no-op otherwise */
  PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
  PetscFunctionReturn(0);
}
1117 
1118 /*@
1119     PCASMGetType - Gets the type of restriction and interpolation used
1120     for local problems in the additive Schwarz method.
1121 
1122     Logically Collective on pc
1123 
1124     Input Parameter:
1125 .   pc  - the preconditioner context
1126 
1127     Output Parameter:
1128 .   type - variant of ASM, one of
1129 
1130 .vb
1131       PC_ASM_BASIC       - full interpolation and restriction
1132       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1133       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1134       PC_ASM_NONE        - local processor restriction and interpolation
1135 .ve
1136 
1137     Options Database Key:
1138 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1139 
1140     Level: intermediate
1141 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1143           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1144 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod errors if pc is not of a type that composes PCASMGetType_C */
  PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
  PetscFunctionReturn(0);
}
1152 
1153 /*@
1154   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1155 
1156   Logically Collective on pc
1157 
1158   Input Parameters:
1159 + pc  - the preconditioner context
1160 - type - type of composition, one of
1161 .vb
1162   PC_COMPOSITE_ADDITIVE       - local additive combination
1163   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1164 .ve
1165 
1166   Options Database Key:
1167 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1168 
1169   Level: intermediate
1170 
1171 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1172 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatches to PCASMSetLocalType_ASM() when pc is of type PCASM; no-op otherwise */
  PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
  PetscFunctionReturn(0);
}
1181 
1182 /*@
1183   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1184 
1185   Logically Collective on pc
1186 
1187   Input Parameter:
1188 . pc  - the preconditioner context
1189 
1190   Output Parameter:
1191 . type - type of composition, one of
1192 .vb
1193   PC_COMPOSITE_ADDITIVE       - local additive combination
1194   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1195 .ve
1196 
1197   Options Database Key:
1198 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1199 
1200   Level: intermediate
1201 
1202 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1203 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod errors if pc is not of a type that composes PCASMGetLocalType_C */
  PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
  PetscFunctionReturn(0);
}
1212 
1213 /*@
1214     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1215 
1216     Logically Collective on pc
1217 
1218     Input Parameters:
1219 +   pc  - the preconditioner context
1220 -   doSort - sort the subdomain indices
1221 
1222     Level: intermediate
1223 
1224 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1225           PCASMCreateSubdomains2D()
1226 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatches to PCASMSetSortIndices_ASM() when pc is of type PCASM; no-op otherwise */
  PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
  PetscFunctionReturn(0);
}
1235 
1236 /*@C
1237    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1238    this processor.
1239 
1240    Collective on pc iff first_local is requested
1241 
1242    Input Parameter:
1243 .  pc - the preconditioner context
1244 
1245    Output Parameters:
1246 +  n_local - the number of blocks on this processor or NULL
1247 .  first_local - the global number of the first block on this processor or NULL,
1248                  all processors must request or all must pass NULL
1249 -  ksp - the array of KSP contexts
1250 
1251    Note:
1252    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1253 
1254    You must call KSPSetUp() before calling PCASMGetSubKSP().
1255 
1256    Fortran note:
1257    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1258 
1259    Level: advanced
1260 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
1262           PCASMCreateSubdomains2D(),
1263 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod errors if pc is not of a type that composes PCASMGetSubKSP_C */
  PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
  PetscFunctionReturn(0);
}
1271 
1272 /* -------------------------------------------------------------------------------------*/
1273 /*MC
1274    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1275            its own KSP object.
1276 
1277    Options Database Keys:
1278 +  -pc_asm_blocks <blks> - Sets total blocks
1279 .  -pc_asm_overlap <ovl> - Sets overlap
1280 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1281 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1282 
1283      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1284       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1285       -pc_asm_type basic to use the standard ASM.
1286 
1287    Notes:
1288     Each processor can have one or more blocks, but a block cannot be shared by more
1289      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1290 
1291      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1292         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1293 
1294      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1295          and set the options directly on the resulting KSP object (you can access its PC
1296          with KSPGetPC())
1297 
1298    Level: beginner
1299 
1300     References:
1301 +   * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1302      Courant Institute, New York University Technical report
1303 -   * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1304     Cambridge University Press.
1305 
1306 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1307            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1308            PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1309 
1310 M*/
1311 
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PC_ASM         *osm;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc,&osm));

  /* Defaults: subdomain layout decided at setup time, overlap of 1,
     restricted ASM with additive local composition, sorted indices */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = NULL;
  osm->restriction       = NULL;
  osm->lprolongation     = NULL;
  osm->lrestriction      = NULL;
  osm->x                 = NULL;
  osm->y                 = NULL;
  osm->is                = NULL;
  osm->is_local          = NULL;
  osm->mat               = NULL;
  osm->pmat              = NULL;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  /* Install the function table for this PC implementation */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->matapply        = PCMatApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = NULL;

  /* Compose the type-specific methods so the generic PCASMXXX() wrappers can dispatch to them */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM));
  PetscFunctionReturn(0);
}
1364 
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameter:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Notes:
   This generates nonoverlapping subdomains; the PCASM will generate the overlap
   from these if you use PCASMSetLocalSubdomains().

   The array of index sets is allocated here; the caller is responsible for
   freeing it, e.g. with PCASMDestroySubdomains().

   In the Fortran version you must provide the array outis[] already allocated of length n.

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  PetscCheck(n >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %" PetscInt_FMT,n);

  /* Get prefix, row distribution, and block size */
  PetscCall(MatGetOptionsPrefix(A,&prefix));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(MatGetBlockSize(A,&bs));
  /* the local row range must align with the block size, otherwise blocks straddle processes */
  PetscCheck(rstart/bs*bs == rstart && rend/bs*bs == rend,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT,rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  PetscCall(MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop));
  if (hasop) {
    PetscCall(MatGetDiagonalBlock(A,&Ad));
  }
  if (Ad) {
    /* BAIJ/SBAIJ diagonal blocks require block-aware index handling below */
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij));
    if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij));
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    PetscCall(MatPartitioningCreate(PETSC_COMM_SELF,&mpart));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix));
    PetscCall(MatPartitioningSetFromOptions(mpart));
    /* the "current" and "square" partitioners do not produce a useful decomposition here */
    PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match));
    if (!match) {
      PetscCall(PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match));
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      PetscCall(MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=NULL,*jja=NULL;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        PetscCall(PetscMalloc1(na+1,&iia));
        PetscCall(PetscMalloc1(nnz,&jja));
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; adj takes ownership of iia and jja,
           which is why they are not freed here (they go away with MatDestroy(&adj)) */
        PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj));
        PetscCall(MatPartitioningSetAdjacency(mpart,adj));
        PetscCall(MatPartitioningSetNParts(mpart,n));
        PetscCall(MatPartitioningApply(mpart,&ispart));
        PetscCall(ISPartitioningToNumbering(ispart,&isnumb));
        PetscCall(MatDestroy(&adj));
        foundpart = PETSC_TRUE;
      }
      PetscCall(MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done));
    }
    PetscCall(MatPartitioningDestroy(&mpart));
  }

  /* Allocate the output array of index sets; the caller frees it (see PCASMDestroySubdomains()) */
  PetscCall(PetscMalloc1(n,&is));
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* spread the remainder (mbs % n) one extra block-row at a time over the first subdomains */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      PetscCall(ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]));
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    PetscCall(PetscMalloc1(n,&count));
    PetscCall(ISPartitioningCount(ispart,n,count));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    PetscCall(ISGetLocalSize(isnumb,&nidx));
    PetscCall(PetscMalloc1(nidx,&indices));
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    PetscCall(ISGetIndices(isnumb,&numbering));
    /* sorting the identity permutation by the new numbering leaves indices[]
       listing the original node numbers grouped by partition */
    PetscCall(PetscSortIntWithPermutation(nidx,numbering,indices));
    PetscCall(ISRestoreIndices(isnumb,&numbering));
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs constituent point indices */
      PetscCall(PetscMalloc1(nidx*bs,&newidx));
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      PetscCall(PetscFree(indices));
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]));
      PetscCall(ISSort(is[i]));
      start += count[i];
    }

    PetscCall(PetscFree(count));
    PetscCall(PetscFree(indices));
    PetscCall(ISDestroy(&isnumb));
    PetscCall(ISDestroy(&ispart));

  }
  PetscFunctionReturn(0);
}
1539 
1540 /*@C
1541    PCASMDestroySubdomains - Destroys the index sets created with
1542    PCASMCreateSubdomains(). Should be called after setting subdomains
1543    with PCASMSetLocalSubdomains().
1544 
1545    Collective
1546 
1547    Input Parameters:
1548 +  n - the number of index sets
1549 .  is - the array of index sets
1550 -  is_local - the array of local index sets, can be NULL
1551 
1552    Level: advanced
1553 
1554 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1555 @*/
1556 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1557 {
1558   PetscInt       i;
1559 
1560   PetscFunctionBegin;
1561   if (n <= 0) PetscFunctionReturn(0);
1562   if (is) {
1563     PetscValidPointer(is,2);
1564     for (i=0; i<n; i++) PetscCall(ISDestroy(&is[i]));
1565     PetscCall(PetscFree(is));
1566   }
1567   if (is_local) {
1568     PetscValidPointer(is_local,3);
1569     for (i=0; i<n; i++) PetscCall(ISDestroy(&is_local[i]));
1570     PetscCall(PetscFree(is_local));
1571   }
1572   PetscFunctionReturn(0);
1573 }
1574 
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m   - the number of mesh points in the x direction
.  n   - the number of mesh points in the y direction
.  M   - the number of subdomains in the x direction
.  N   - the number of subdomains in the y direction
.  dof - degrees of freedom per node (only dof = 1 is currently supported)
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  PetscCheck(dof == 1,PETSC_COMM_SELF,PETSC_ERR_SUP,"dof must be 1");

  *Nsub     = N*M;
  PetscCall(PetscMalloc1(*Nsub,is));
  PetscCall(PetscMalloc1(*Nsub,is_local));
  ystart    = 0;
  loc_outer = 0;
  /* subdomains are visited row by row; mesh points use the row-major numbering m*y + x */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    PetscCheck(height >= 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* grow the subdomain by the overlap in y, clamped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      PetscCheck(width >= 2,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* grow the subdomain by the overlap in x, clamped to the mesh boundary */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft); /* number of points in the overlapping rectangle */
      PetscCall(PetscMalloc1(nidx,&idx));
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer));
      if (overlap == 0) {
        /* no overlap: the local subdomain is identical, so share the IS via a reference */
        PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx for the interior (non-overlapping) rectangle; it fits because
           the overlapping rectangle above is at least as large */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer));
      }
      PetscCall(PetscFree(idx));
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* ensure each overlapping subdomain has sorted indices */
  for (i=0; i<*Nsub; i++) PetscCall(ISSort((*is)[i]));
  PetscFunctionReturn(0);
}
1657 
1658 /*@C
1659     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1660     only) for the additive Schwarz preconditioner.
1661 
1662     Not Collective
1663 
1664     Input Parameter:
1665 .   pc - the preconditioner context
1666 
1667     Output Parameters:
1668 +   n - if requested, the number of subdomains for this processor (default value = 1)
1669 .   is - if requested, the index sets that define the subdomains for this processor
1670 -   is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1671 
1672     Notes:
1673     The IS numbering is in the parallel, global numbering of the vector.
1674 
1675     Level: advanced
1676 
1677 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1678           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1679 @*/
1680 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1681 {
1682   PC_ASM         *osm = (PC_ASM*)pc->data;
1683   PetscBool      match;
1684 
1685   PetscFunctionBegin;
1686   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1687   if (n) PetscValidIntPointer(n,2);
1688   if (is) PetscValidPointer(is,3);
1689   if (is_local) PetscValidPointer(is_local,4);
1690   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1691   PetscCheck(match,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1692   if (n) *n = osm->n_local_true;
1693   if (is) *is = osm->is;
1694   if (is_local) *is_local = osm->is_local;
1695   PetscFunctionReturn(0);
1696 }
1697 
1698 /*@C
1699     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1700     only) for the additive Schwarz preconditioner.
1701 
1702     Not Collective
1703 
1704     Input Parameter:
1705 .   pc - the preconditioner context
1706 
1707     Output Parameters:
1708 +   n - if requested, the number of matrices for this processor (default value = 1)
1709 -   mat - if requested, the matrices
1710 
1711     Level: advanced
1712 
1713     Notes:
1714     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks())
1715 
1716            Usually one would use PCSetModifySubMatrices() to change the submatrices in building the preconditioner.
1717 
1718 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1719           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1720 @*/
1721 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1722 {
1723   PC_ASM         *osm;
1724   PetscBool      match;
1725 
1726   PetscFunctionBegin;
1727   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1728   if (n) PetscValidIntPointer(n,2);
1729   if (mat) PetscValidPointer(mat,3);
1730   PetscCheck(pc->setupcalled,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1731   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1732   if (!match) {
1733     if (n) *n = 0;
1734     if (mat) *mat = NULL;
1735   } else {
1736     osm = (PC_ASM*)pc->data;
1737     if (n) *n = osm->n_local_true;
1738     if (mat) *mat = osm->pmat;
1739   }
1740   PetscFunctionReturn(0);
1741 }
1742 
1743 /*@
1744     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1745 
1746     Logically Collective
1747 
1748     Input Parameters:
1749 +   pc  - the preconditioner
1750 -   flg - boolean indicating whether to use subdomains defined by the DM
1751 
1752     Options Database Key:
1753 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1754 
1755     Level: intermediate
1756 
1757     Notes:
1758     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1759     so setting either of the first two effectively turns the latter off.
1760 
1761 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1762           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1763 @*/
1764 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1765 {
1766   PC_ASM         *osm = (PC_ASM*)pc->data;
1767   PetscBool      match;
1768 
1769   PetscFunctionBegin;
1770   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1771   PetscValidLogicalCollectiveBool(pc,flg,2);
1772   PetscCheck(!pc->setupcalled,((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1773   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1774   if (match) {
1775     osm->dm_subdomains = flg;
1776   }
1777   PetscFunctionReturn(0);
1778 }
1779 
1780 /*@
1781     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1782     Not Collective
1783 
1784     Input Parameter:
1785 .   pc  - the preconditioner
1786 
1787     Output Parameter:
1788 .   flg - boolean indicating whether to use subdomains defined by the DM
1789 
1790     Level: intermediate
1791 
1792 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1793           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1794 @*/
1795 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1796 {
1797   PC_ASM         *osm = (PC_ASM*)pc->data;
1798   PetscBool      match;
1799 
1800   PetscFunctionBegin;
1801   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1802   PetscValidBoolPointer(flg,2);
1803   PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCASM,&match));
1804   if (match) *flg = osm->dm_subdomains;
1805   else *flg = PETSC_FALSE;
1806   PetscFunctionReturn(0);
1807 }
1808 
1809 /*@
1810      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1811 
1812    Not Collective
1813 
1814    Input Parameter:
1815 .  pc - the PC
1816 
1817    Output Parameter:
1818 .  -pc_asm_sub_mat_type - name of matrix type
1819 
1820    Level: advanced
1821 
1822 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1823 @*/
1824 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1825 {
1826   PetscFunctionBegin;
1827   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1828   PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));
1829   PetscFunctionReturn(0);
1830 }
1831 
1832 /*@
1833      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1834 
1835    Collective on Mat
1836 
1837    Input Parameters:
1838 +  pc             - the PC object
1839 -  sub_mat_type   - matrix type
1840 
1841    Options Database Key:
1842 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1843 
1844    Notes:
1845    See "${PETSC_DIR}/include/petscmat.h" for available types
1846 
1847   Level: advanced
1848 
1849 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1850 @*/
1851 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1852 {
1853   PetscFunctionBegin;
1854   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1855   PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));
1856   PetscFunctionReturn(0);
1857 }
1858