xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 2b8d69ca7ea5fe9190df62c1dce3bbd66fce84dd)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
/*
   Private context for the additive Schwarz (ASM) preconditioner.
   One instance hangs off pc->data; all arrays below are sized by the
   subdomain counts in the first three fields (see the file header comment
   for the n / n_local / n_local_true relationship).
*/
typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints the ASM configuration (block counts, overlap,
   restriction/interpolation type, local composition type) and the subdomain
   KSP solver(s) to an ASCII or string viewer.  Collective on pc: the
   GetSubViewer/RestoreSubViewer and synchronized-print calls must be made by
   every rank in the same order.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* default texts used when overlap / block count have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share one solver configuration: show a single representative
         KSP, printed by rank 0 only (all ranks still make the collective
         Get/RestoreSubViewer calls) */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers differ per block: each rank prints all of its local blocks
         through the synchronized viewer */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* compact one-line summary plus the first block's KSP for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 #undef __FUNCT__
101 #define __FUNCT__ "PCASMPrintSubdomains"
102 static PetscErrorCode PCASMPrintSubdomains(PC pc)
103 {
104   PC_ASM         *osm = (PC_ASM*)pc->data;
105   const char     *prefix;
106   char           fname[PETSC_MAX_PATH_LEN+1];
107   PetscViewer    viewer, sviewer;
108   char           *s;
109   PetscInt       i,j,nidx;
110   const PetscInt *idx;
111   PetscMPIInt    rank, size;
112   PetscErrorCode ierr;
113 
114   PetscFunctionBegin;
115   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
116   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
117   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
118   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
119   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
120   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
121   for (i=0; i<osm->n_local; i++) {
122     if (i < osm->n_local_true) {
123       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
124       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
125       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
126       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
127       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
128       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
129       for (j=0; j<nidx; j++) {
130         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
131       }
132       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
133       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
134       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
137       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
138       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
139       ierr = PetscFree(s);CHKERRQ(ierr);
140       if (osm->is_local) {
141         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
142         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
144         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
145         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
146         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
147         for (j=0; j<nidx; j++) {
148           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
149         }
150         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
151         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
152         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
153         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
155         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
156         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
157         ierr = PetscFree(s);CHKERRQ(ierr);
158       }
159     } else {
160       /* Participate in collective viewer calls. */
161       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
162       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
164       /* Assume either all ranks have is_local or none do. */
165       if (osm->is_local) {
166         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
167         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
169       }
170     }
171   }
172   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
173   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
174   PetscFunctionReturn(0);
175 }
176 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds the ASM data structures.

   On the first call (pc->setupcalled false): determines the subdomain index
   sets (from a DM decomposition, from user-set subdomains, or one per rank),
   grows them by the requested overlap, creates the restriction/localization/
   prolongation scatters and work vectors, and creates one sequential KSP per
   local subdomain.  On every call: (re)extracts the subdomain matrices and
   hands them to the subdomain solvers.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* For a known-symmetric operator, default to the symmetric (basic) variant
       unless the user chose a type explicitly */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; release ours
           (domain_dm is kept until the subdomain KSPs are created below) */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* zero overlap: is_local[i] can share the same IS object */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        /* Express the non-overlapping subdomain's indices in the local
           numbering of the overlapping subdomain, then build the
           localization (overlapping -> non-overlapping) scatter */
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* no separate non-overlapping sets: share y and the restriction
           scatter (extra references keep destroy logic uniform) */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* Padding entries (indices n_local_true..n_local-1) exist only so every
       rank makes the same number of collective scatter calls; they move no data */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        /* subdomain solvers are configured via the -<prefix>sub_ options */
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      /* union of all local overlapping subdomains, plus work vectors on it,
         used by the multiplicative correction in PCApply_ASM() */
      PetscInt m;

      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* rectangular blocks: rows from each subdomain, columns from the union lis */
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
416 
417 #include <petsc/private/kspimpl.h>
418 #undef __FUNCT__
419 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
420 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
421 {
422   PC_ASM         *osm = (PC_ASM*)pc->data;
423   PetscErrorCode ierr;
424   PetscInt       i;
425 
426   PetscFunctionBegin;
427   for (i=0; i<osm->n_local_true; i++) {
428     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
429     if (osm->ksp[i]->reason == KSP_DIVERGED_PCSETUP_FAILED) {
430       pc->failedreason = PC_SUBPC_ERROR;
431     }
432   }
433   PetscFunctionReturn(0);
434 }
435 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner to x, producing y.

   Each subdomain i is handled by: restrict x to the subdomain work vector,
   solve with the subdomain KSP, (optionally) localize to the non-overlapping
   part, and add the result back into y via the prolongation scatter.  The
   restriction/interpolation may be limited to local values depending on
   osm->type; the local solves are combined additively or multiplicatively
   depending on osm->loctype.  Scatter Begin/End pairs are deliberately
   pipelined in the additive path — do not reorder them.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    /* start all restrictions before any solve so communication overlaps computation */
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* propagate this subdomain's correction into the rhs of the next
           block: lx gets -A_{i+1} * (prolonged y_local[i]) restricted to
           subdomain i+1 (via the rectangular lmats block) */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
527 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the (additive) ASM
   preconditioner: same pipelined restrict / solve / prolong structure as the
   additive path of PCApply_ASM(), but using KSPSolveTranspose() and with the
   roles of the RESTRICT/INTERPOLATE flags swapped (see note below).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* start all restrictions before any solve so communication overlaps computation */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
578 
#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
/*
   PCReset_ASM - Frees everything created during PCSetUp_ASM() so the
   preconditioner can be set up again (possibly with a different operator).
   The subdomain KSP contexts are reset but kept alive; they are destroyed
   only in PCDestroy_ASM().
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    /* Reset (do not destroy) the solver of each true local block; the array itself is reused */
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    /* Scatters and work vectors were allocated with length n_local (>= n_local_true);
       osm->localization may be NULL when no separate non-overlapping mapping was built */
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* Extra objects used only by the local multiplicative combination */
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
626 
627 #undef __FUNCT__
628 #define __FUNCT__ "PCDestroy_ASM"
629 static PetscErrorCode PCDestroy_ASM(PC pc)
630 {
631   PC_ASM         *osm = (PC_ASM*)pc->data;
632   PetscErrorCode ierr;
633   PetscInt       i;
634 
635   PetscFunctionBegin;
636   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
637   if (osm->ksp) {
638     for (i=0; i<osm->n_local_true; i++) {
639       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
640     }
641     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
642   }
643   ierr = PetscFree(pc->data);CHKERRQ(ierr);
644   PetscFunctionReturn(0);
645 }
646 
#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
/*
   PCSetFromOptions_ASM - Configures the ASM preconditioner from the options
   database: -pc_asm_dm_subdomains, -pc_asm_blocks, -pc_asm_overlap,
   -pc_asm_type and -pc_asm_local_type.
*/
static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric (only if the user has not chosen a type explicitly) */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    /* An explicit block count overrides any DM-defined subdomains */
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    /* An explicit overlap likewise disables DM-defined subdomains */
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
685 
686 /*------------------------------------------------------------------------------------*/
687 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains():
   stores copies (references) of the caller's overlapping (is) and
   non-overlapping (is_local) subdomain index sets in the PC_ASM context.
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Take references before destroying the old subdomains, in case the caller
       passed (some of) the index sets that are currently stored */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* No overlapping sets given: derive osm->is from the non-overlapping ones */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* Zero overlap: share the same IS rather than copying it */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
740 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
741 {
742   PC_ASM         *osm = (PC_ASM*)pc->data;
743   PetscErrorCode ierr;
744   PetscMPIInt    rank,size;
745   PetscInt       n;
746 
747   PetscFunctionBegin;
748   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
749   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
750 
751   /*
752      Split the subdomains equally among all processors
753   */
754   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
755   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
756   n    = N/size + ((N % size) > rank);
757   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
758   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
759   if (!pc->setupcalled) {
760     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
761 
762     osm->n_local_true = n;
763     osm->is           = 0;
764     osm->is_local     = 0;
765   }
766   PetscFunctionReturn(0);
767 }
768 
769 #undef __FUNCT__
770 #define __FUNCT__ "PCASMSetOverlap_ASM"
771 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
772 {
773   PC_ASM *osm = (PC_ASM*)pc->data;
774 
775   PetscFunctionBegin;
776   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
777   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
778   if (!pc->setupcalled) osm->overlap = ovl;
779   PetscFunctionReturn(0);
780 }
781 
782 #undef __FUNCT__
783 #define __FUNCT__ "PCASMSetType_ASM"
784 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
785 {
786   PC_ASM *osm = (PC_ASM*)pc->data;
787 
788   PetscFunctionBegin;
789   osm->type     = type;
790   osm->type_set = PETSC_TRUE;
791   PetscFunctionReturn(0);
792 }
793 
794 #undef __FUNCT__
795 #define __FUNCT__ "PCASMGetType_ASM"
796 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
797 {
798   PC_ASM *osm = (PC_ASM*)pc->data;
799 
800   PetscFunctionBegin;
801   *type = osm->type;
802   PetscFunctionReturn(0);
803 }
804 
805 #undef __FUNCT__
806 #define __FUNCT__ "PCASMSetLocalType_ASM"
807 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
808 {
809   PC_ASM *osm = (PC_ASM *) pc->data;
810 
811   PetscFunctionBegin;
812   osm->loctype = type;
813   PetscFunctionReturn(0);
814 }
815 
816 #undef __FUNCT__
817 #define __FUNCT__ "PCASMGetLocalType_ASM"
818 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
819 {
820   PC_ASM *osm = (PC_ASM *) pc->data;
821 
822   PetscFunctionBegin;
823   *type = osm->loctype;
824   PetscFunctionReturn(0);
825 }
826 
827 #undef __FUNCT__
828 #define __FUNCT__ "PCASMSetSortIndices_ASM"
829 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
830 {
831   PC_ASM *osm = (PC_ASM*)pc->data;
832 
833   PetscFunctionBegin;
834   osm->sort_indices = doSort;
835   PetscFunctionReturn(0);
836 }
837 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
/*
   PCASMGetSubKSP_ASM - Implementation of PCASMGetSubKSP(): returns the array
   of subdomain KSP contexts and, optionally, the global index of this
   process's first block.
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* Inclusive prefix sum of per-rank block counts; subtracting our own count
       turns it into the exclusive prefix sum, i.e. the index of our first block.
       Collective, so all ranks must request first_local or all must pass NULL. */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
861 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    This routine should be called before PCSetUp() is used on the PC (for
    example, before KSPSetUp() or KSPSolve() on the outer solver).

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
900 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
951 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine. If MatIncreaseOverlap is used,
    use option -mat_increase_overlap when the problem size is large.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1004 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMGetType(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1043 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetType"
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, get, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetType(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1084 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType"
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Note:
  The default composition type is PC_COMPOSITE_ADDITIVE.

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1117 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType"
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.keywords: PC, ASM, get, local, type

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1152 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Note:
    By default the subdomain indices are sorted (PETSC_TRUE).

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1181 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP objects is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1226 
1227 /* -------------------------------------------------------------------------------------*/
1228 /*MC
1229    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1230            its own KSP object.
1231 
1232    Options Database Keys:
1233 +  -pc_asm_blocks <blks> - Sets total blocks
1234 .  -pc_asm_overlap <ovl> - Sets overlap
1235 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1236 
1237      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1238       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1239       -pc_asm_type basic to use the standard ASM.
1240 
1241    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1242      than one processor. Defaults to one block per processor.
1243 
1244      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1245         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1246 
1247      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1248          and set the options directly on the resulting KSP object (you can access its PC
1249          with KSPGetPC())
1250 
1251 
1252    Level: beginner
1253 
1254    Concepts: additive Schwarz method
1255 
1256     References:
1257 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1258      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1260     Cambridge University Press.
1261 
1262 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1263            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1264            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1265 
1266 M*/
1267 
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
/*
   PCCreate_ASM - Creates the PC_ASM context, installs the function table for
   the PCASM type, and registers the PCASMSetXXX/GetXXX methods.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* Defaults: subdomain counts decided at setup, overlap 1, restricted ASM,
     additive local composition, sorted subdomain indices, no DM subdomains */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* Compose the type-specific methods so the public PCASMXXX() routines can dispatch to them */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1321 
1322 #undef __FUNCT__
1323 #define __FUNCT__ "PCASMCreateSubdomains"
1324 /*@C
1325    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1326    preconditioner for a any problem on a general grid.
1327 
1328    Collective
1329 
1330    Input Parameters:
1331 +  A - The global matrix operator
1332 -  n - the number of local blocks
1333 
1334    Output Parameters:
1335 .  outis - the array of index sets defining the subdomains
1336 
1337    Level: advanced
1338 
1339    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1340     from these if you use PCASMSetLocalSubdomains()
1341 
1342     In the Fortran version you must provide the array outis[] already allocated of length n.
1343 
1344 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1345 
1346 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1347 @*/
1348 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1349 {
1350   MatPartitioning mpart;
1351   const char      *prefix;
1352   PetscErrorCode  (*f)(Mat,Mat*);
1353   PetscMPIInt     size;
1354   PetscInt        i,j,rstart,rend,bs;
1355   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1356   Mat             Ad     = NULL, adj;
1357   IS              ispart,isnumb,*is;
1358   PetscErrorCode  ierr;
1359 
1360   PetscFunctionBegin;
1361   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1362   PetscValidPointer(outis,3);
1363   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1364 
1365   /* Get prefix, row distribution, and block size */
1366   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1367   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1368   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1369   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1370 
1371   /* Get diagonal block from matrix if possible */
1372   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1373   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1374   if (f) {
1375     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1376   } else if (size == 1) {
1377     Ad = A;
1378   }
1379   if (Ad) {
1380     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1381     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1382   }
1383   if (Ad && n > 1) {
1384     PetscBool match,done;
1385     /* Try to setup a good matrix partitioning if available */
1386     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1387     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1388     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1389     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1390     if (!match) {
1391       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1392     }
1393     if (!match) { /* assume a "good" partitioner is available */
1394       PetscInt       na;
1395       const PetscInt *ia,*ja;
1396       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1397       if (done) {
1398         /* Build adjacency matrix by hand. Unfortunately a call to
1399            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1400            remove the block-aij structure and we cannot expect
1401            MatPartitioning to split vertices as we need */
1402         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1403         const PetscInt *row;
1404         nnz = 0;
1405         for (i=0; i<na; i++) { /* count number of nonzeros */
1406           len = ia[i+1] - ia[i];
1407           row = ja + ia[i];
1408           for (j=0; j<len; j++) {
1409             if (row[j] == i) { /* don't count diagonal */
1410               len--; break;
1411             }
1412           }
1413           nnz += len;
1414         }
1415         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1416         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1417         nnz    = 0;
1418         iia[0] = 0;
1419         for (i=0; i<na; i++) { /* fill adjacency */
1420           cnt = 0;
1421           len = ia[i+1] - ia[i];
1422           row = ja + ia[i];
1423           for (j=0; j<len; j++) {
1424             if (row[j] != i) { /* if not diagonal */
1425               jja[nnz+cnt++] = row[j];
1426             }
1427           }
1428           nnz     += cnt;
1429           iia[i+1] = nnz;
1430         }
1431         /* Partitioning of the adjacency matrix */
1432         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1433         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1434         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1435         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1436         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1437         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1438         foundpart = PETSC_TRUE;
1439       }
1440       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1441     }
1442     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1443   }
1444 
1445   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1446   *outis = is;
1447 
1448   if (!foundpart) {
1449 
1450     /* Partitioning by contiguous chunks of rows */
1451 
1452     PetscInt mbs   = (rend-rstart)/bs;
1453     PetscInt start = rstart;
1454     for (i=0; i<n; i++) {
1455       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1456       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1457       start += count;
1458     }
1459 
1460   } else {
1461 
1462     /* Partitioning by adjacency of diagonal block  */
1463 
1464     const PetscInt *numbering;
1465     PetscInt       *count,nidx,*indices,*newidx,start=0;
1466     /* Get node count in each partition */
1467     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1468     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1469     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1470       for (i=0; i<n; i++) count[i] *= bs;
1471     }
1472     /* Build indices from node numbering */
1473     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1474     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1475     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1476     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1477     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1478     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1479     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1480       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1481       for (i=0; i<nidx; i++) {
1482         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1483       }
1484       ierr    = PetscFree(indices);CHKERRQ(ierr);
1485       nidx   *= bs;
1486       indices = newidx;
1487     }
1488     /* Shift to get global indices */
1489     for (i=0; i<nidx; i++) indices[i] += rstart;
1490 
1491     /* Build the index sets for each block */
1492     for (i=0; i<n; i++) {
1493       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1494       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1495       start += count[i];
1496     }
1497 
1498     ierr = PetscFree(count);CHKERRQ(ierr);
1499     ierr = PetscFree(indices);CHKERRQ(ierr);
1500     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1501     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1502 
1503   }
1504   PetscFunctionReturn(0);
1505 }
1506 
1507 #undef __FUNCT__
1508 #define __FUNCT__ "PCASMDestroySubdomains"
1509 /*@C
1510    PCASMDestroySubdomains - Destroys the index sets created with
1511    PCASMCreateSubdomains(). Should be called after setting subdomains
1512    with PCASMSetLocalSubdomains().
1513 
1514    Collective
1515 
1516    Input Parameters:
1517 +  n - the number of index sets
1518 .  is - the array of index sets
1519 -  is_local - the array of local index sets, can be NULL
1520 
1521    Level: advanced
1522 
1523 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1524 
1525 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1526 @*/
1527 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1528 {
1529   PetscInt       i;
1530   PetscErrorCode ierr;
1531 
1532   PetscFunctionBegin;
1533   if (n <= 0) PetscFunctionReturn(0);
1534   if (is) {
1535     PetscValidPointer(is,2);
1536     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1537     ierr = PetscFree(is);CHKERRQ(ierr);
1538   }
1539   if (is_local) {
1540     PetscValidPointer(is_local,3);
1541     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1542     ierr = PetscFree(is_local);CHKERRQ(ierr);
1543   }
1544   PetscFunctionReturn(0);
1545 }
1546 
1547 #undef __FUNCT__
1548 #define __FUNCT__ "PCASMCreateSubdomains2D"
1549 /*@
1550    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1551    preconditioner for a two-dimensional problem on a regular grid.
1552 
1553    Not Collective
1554 
1555    Input Parameters:
1556 +  m, n - the number of mesh points in the x and y directions
1557 .  M, N - the number of subdomains in the x and y directions
1558 .  dof - degrees of freedom per node
1559 -  overlap - overlap in mesh lines
1560 
1561    Output Parameters:
1562 +  Nsub - the number of subdomains created
1563 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1564 -  is_local - array of index sets defining non-overlapping subdomains
1565 
1566    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1568    preconditioners.  More general related routines are
1569    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1570 
1571    Level: advanced
1572 
1573 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1574 
1575 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1576           PCASMSetOverlap()
1577 @*/
1578 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1579 {
1580   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1581   PetscErrorCode ierr;
1582   PetscInt       nidx,*idx,loc,ii,jj,count;
1583 
1584   PetscFunctionBegin;
1585   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1586 
1587   *Nsub     = N*M;
1588   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1589   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1590   ystart    = 0;
1591   loc_outer = 0;
1592   for (i=0; i<N; i++) {
1593     height = n/N + ((n % N) > i); /* height of subdomain */
1594     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1595     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1596     yright = ystart + height + overlap; if (yright > n) yright = n;
1597     xstart = 0;
1598     for (j=0; j<M; j++) {
1599       width = m/M + ((m % M) > j); /* width of subdomain */
1600       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1601       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1602       xright = xstart + width + overlap; if (xright > m) xright = m;
1603       nidx   = (xright - xleft)*(yright - yleft);
1604       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1605       loc    = 0;
1606       for (ii=yleft; ii<yright; ii++) {
1607         count = m*ii + xleft;
1608         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1609       }
1610       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1611       if (overlap == 0) {
1612         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1613 
1614         (*is_local)[loc_outer] = (*is)[loc_outer];
1615       } else {
1616         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1617           for (jj=xstart; jj<xstart+width; jj++) {
1618             idx[loc++] = m*ii + jj;
1619           }
1620         }
1621         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1622       }
1623       ierr    = PetscFree(idx);CHKERRQ(ierr);
1624       xstart += width;
1625       loc_outer++;
1626     }
1627     ystart += height;
1628   }
1629   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1630   PetscFunctionReturn(0);
1631 }
1632 
1633 #undef __FUNCT__
1634 #define __FUNCT__ "PCASMGetLocalSubdomains"
1635 /*@C
1636     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1637     only) for the additive Schwarz preconditioner.
1638 
1639     Not Collective
1640 
1641     Input Parameter:
1642 .   pc - the preconditioner context
1643 
1644     Output Parameters:
1645 +   n - the number of subdomains for this processor (default value = 1)
1646 .   is - the index sets that define the subdomains for this processor
1647 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1648 
1649 
1650     Notes:
1651     The IS numbering is in the parallel, global numbering of the vector.
1652 
1653     Level: advanced
1654 
1655 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1656 
1657 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1658           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1659 @*/
1660 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1661 {
1662   PC_ASM         *osm;
1663   PetscErrorCode ierr;
1664   PetscBool      match;
1665 
1666   PetscFunctionBegin;
1667   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1668   PetscValidIntPointer(n,2);
1669   if (is) PetscValidPointer(is,3);
1670   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1671   if (!match) {
1672     if (n) *n = 0;
1673     if (is) *is = NULL;
1674   } else {
1675     osm = (PC_ASM*)pc->data;
1676     if (n) *n = osm->n_local_true;
1677     if (is) *is = osm->is;
1678     if (is_local) *is_local = osm->is_local;
1679   }
1680   PetscFunctionReturn(0);
1681 }
1682 
1683 #undef __FUNCT__
1684 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1685 /*@C
1686     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1687     only) for the additive Schwarz preconditioner.
1688 
1689     Not Collective
1690 
1691     Input Parameter:
1692 .   pc - the preconditioner context
1693 
1694     Output Parameters:
1695 +   n - the number of matrices for this processor (default value = 1)
1696 -   mat - the matrices
1697 
1698 
1699     Level: advanced
1700 
    Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1702 
1703            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1704 
1705 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1706 
1707 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1708           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1709 @*/
1710 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1711 {
1712   PC_ASM         *osm;
1713   PetscErrorCode ierr;
1714   PetscBool      match;
1715 
1716   PetscFunctionBegin;
1717   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1718   PetscValidIntPointer(n,2);
1719   if (mat) PetscValidPointer(mat,3);
1720   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1721   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1722   if (!match) {
1723     if (n) *n = 0;
1724     if (mat) *mat = NULL;
1725   } else {
1726     osm = (PC_ASM*)pc->data;
1727     if (n) *n = osm->n_local_true;
1728     if (mat) *mat = osm->pmat;
1729   }
1730   PetscFunctionReturn(0);
1731 }
1732 
1733 #undef __FUNCT__
1734 #define __FUNCT__ "PCASMSetDMSubdomains"
1735 /*@
1736     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1737     Logically Collective
1738 
1739     Input Parameter:
1740 +   pc  - the preconditioner
1741 -   flg - boolean indicating whether to use subdomains defined by the DM
1742 
1743     Options Database Key:
.   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1745 
1746     Level: intermediate
1747 
1748     Notes:
1749     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1750     so setting either of the first two effectively turns the latter off.
1751 
1752 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1753 
1754 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1755           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1756 @*/
1757 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1758 {
1759   PC_ASM         *osm = (PC_ASM*)pc->data;
1760   PetscErrorCode ierr;
1761   PetscBool      match;
1762 
1763   PetscFunctionBegin;
1764   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1765   PetscValidLogicalCollectiveBool(pc,flg,2);
1766   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1767   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1768   if (match) {
1769     osm->dm_subdomains = flg;
1770   }
1771   PetscFunctionReturn(0);
1772 }
1773 
1774 #undef __FUNCT__
1775 #define __FUNCT__ "PCASMGetDMSubdomains"
1776 /*@
1777     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1778     Not Collective
1779 
1780     Input Parameter:
1781 .   pc  - the preconditioner
1782 
1783     Output Parameter:
1784 .   flg - boolean indicating whether to use subdomains defined by the DM
1785 
1786     Level: intermediate
1787 
1788 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1789 
1790 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1791           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1792 @*/
1793 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1794 {
1795   PC_ASM         *osm = (PC_ASM*)pc->data;
1796   PetscErrorCode ierr;
1797   PetscBool      match;
1798 
1799   PetscFunctionBegin;
1800   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1801   PetscValidPointer(flg,2);
1802   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1803   if (match) {
1804     if (flg) *flg = osm->dm_subdomains;
1805   }
1806   PetscFunctionReturn(0);
1807 }
1808