xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision a44d2f108bafe6da0bbb003e57fd0fd6f29a3752)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
/* Private context for the additive Schwarz (PCASM) preconditioner. */
typedef struct {
  PetscInt   n, n_local, n_local_true;  /* see the n/n_local/n_local_true definitions in the header comment above */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; allocated only when is_local is set */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves (additive or multiplicative) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors on the union of all local subdomains */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints the state of the ASM preconditioner (subdomain counts,
   overlap, type, and the local KSP solvers) to an ASCII or string viewer.

   Collective on the communicator of pc: all ranks must execute the
   Get/RestoreSubViewer calls in the same order even when only rank 0 prints.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* defaults used when overlap/blocks were not (yet) determined */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        /* all ranks must get/restore the subviewer, but only rank 0 views its first solver */
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* solvers differ per block: print every rank's local solvers in rank order */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* compact one-line summary for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 #undef __FUNCT__
101 #define __FUNCT__ "PCASMPrintSubdomains"
102 static PetscErrorCode PCASMPrintSubdomains(PC pc)
103 {
104   PC_ASM         *osm = (PC_ASM*)pc->data;
105   const char     *prefix;
106   char           fname[PETSC_MAX_PATH_LEN+1];
107   PetscViewer    viewer, sviewer;
108   char           *s;
109   PetscInt       i,j,nidx;
110   const PetscInt *idx;
111   PetscMPIInt    rank, size;
112   PetscErrorCode ierr;
113 
114   PetscFunctionBegin;
115   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
116   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
117   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
118   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
119   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
120   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
121   for (i=0; i<osm->n_local; i++) {
122     if (i < osm->n_local_true) {
123       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
124       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
125       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
126       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
127       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
128       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
129       for (j=0; j<nidx; j++) {
130         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
131       }
132       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
133       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
134       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
137       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
138       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
139       ierr = PetscFree(s);CHKERRQ(ierr);
140       if (osm->is_local) {
141         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
142         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
144         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
145         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
146         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
147         for (j=0; j<nidx; j++) {
148           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
149         }
150         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
151         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
152         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
153         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
155         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
156         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
157         ierr = PetscFree(s);CHKERRQ(ierr);
158       }
159     } else {
160       /* Participate in collective viewer calls. */
161       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
162       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
164       /* Assume either all ranks have is_local or none do. */
165       if (osm->is_local) {
166         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
167         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
169       }
170     }
171   }
172   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
173   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
174   PetscFunctionReturn(0);
175 }
176 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds everything the ASM preconditioner needs before it can
   be applied: the (possibly overlapping) subdomain index sets, the scatters
   between global and subdomain vectors, the per-subdomain work vectors, the
   per-subdomain KSP solvers, and the extracted submatrices.

   On repeated calls (pc->setupcalled set) only the submatrices are refreshed,
   reusing existing storage unless the nonzero pattern changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* If the user did not choose a type and the matrix is known symmetric, use the basic (symmetric) variant */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* release our references; PCASMSetLocalSubdomains() kept its own */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* zero overlap: the local and overlapping index sets coincide, so just share */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* Translate the non-overlapping (local) global indices into positions within the overlapping subdomain */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        /* without is_local, the overlapping solution vector doubles as the local one */
        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        /* and the prolongation is simply the reverse of the restriction */
        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* Ranks holding fewer than n_local true subdomains create empty vectors/scatters
       so that collective scatter calls still line up across the communicator */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;

      /* lis is the sorted union of all local subdomains; lx/ly are work vectors on it */
      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i]: rows of subdomain i, columns on the union lis, used to propagate corrections */
    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
416 
417 #undef __FUNCT__
418 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
419 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
420 {
421   PC_ASM         *osm = (PC_ASM*)pc->data;
422   PetscErrorCode ierr;
423   PetscInt       i;
424 
425   PetscFunctionBegin;
426   for (i=0; i<osm->n_local_true; i++) {
427     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
428   }
429   PetscFunctionReturn(0);
430 }
431 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the ASM preconditioner: restrict x to each subdomain,
   solve with the subdomain KSP, and add the (possibly localized) corrections
   back into y.  The loctype switch selects additive composition of the local
   solves or multiplicative sweeping through the subdomains.

   The split VecScatterBegin/VecScatterEnd calls deliberately overlap
   communication with the local solves; their ordering must not be changed.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    /* start all restrictions first so communication overlaps the solves below */
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* drop the overlap region: keep only the non-overlapping part of the correction */
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves, each subdomain seeing the corrections of its predecessors */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* compute the residual update -A*correction for the next subdomain's right-hand side */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
523 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the (additive) ASM
   preconditioner, using KSPSolveTranspose on each subdomain and swapping
   the roles of the restrict/interpolate flags relative to PCApply_ASM.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* start all restrictions first so communication overlaps the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
574 
#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
/*
   PCReset_ASM - Releases everything built by PCSetUp_ASM (submatrices,
   scatters, work vectors, index sets) while keeping the KSP solvers and the
   user-settable options, so the PC can be set up again with a new matrix.
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* the restriction array is allocated together with the other scatters/vectors,
     so its presence implies all of them exist */
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  /* extra objects only exist for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
622 
623 #undef __FUNCT__
624 #define __FUNCT__ "PCDestroy_ASM"
625 static PetscErrorCode PCDestroy_ASM(PC pc)
626 {
627   PC_ASM         *osm = (PC_ASM*)pc->data;
628   PetscErrorCode ierr;
629   PetscInt       i;
630 
631   PetscFunctionBegin;
632   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
633   if (osm->ksp) {
634     for (i=0; i<osm->n_local_true; i++) {
635       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
636     }
637     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
638   }
639   ierr = PetscFree(pc->data);CHKERRQ(ierr);
640   PetscFunctionReturn(0);
641 }
642 
643 #undef __FUNCT__
644 #define __FUNCT__ "PCSetFromOptions_ASM"
/*
   PCSetFromOptions_ASM - Processes the -pc_asm_* options database entries for this PC.
*/
static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;

  PetscFunctionBegin;
  /* default to the symmetric (basic) variant when the matrix is known symmetric
     and the user has not explicitly chosen a type */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    /* an explicit block count overrides any DM-defined subdomains */
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    /* likewise, an explicit overlap disables DM-defined subdomains */
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
681 
682 /*------------------------------------------------------------------------------------*/
683 
684 #undef __FUNCT__
685 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains() for PCASM.
   Takes ownership (via reference counting) of the caller's index sets that define
   this process' overlapping (is) and non-overlapping (is_local) subdomains.
   Must be called before PCSetUp(); after setup only a matching redundant call is legal.
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the old ones, so nothing
       is lost if the caller passes the same IS objects already stored here */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local was supplied: derive the overlapping sets from it */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified, so it must be a private copy */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* zero overlap: share the very same IS, bumping its reference count */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
736 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
737 {
738   PC_ASM         *osm = (PC_ASM*)pc->data;
739   PetscErrorCode ierr;
740   PetscMPIInt    rank,size;
741   PetscInt       n;
742 
743   PetscFunctionBegin;
744   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
745   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
746 
747   /*
748      Split the subdomains equally among all processors
749   */
750   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
751   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
752   n    = N/size + ((N % size) > rank);
753   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
754   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
755   if (!pc->setupcalled) {
756     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
757 
758     osm->n_local_true = n;
759     osm->is           = 0;
760     osm->is_local     = 0;
761   }
762   PetscFunctionReturn(0);
763 }
764 
765 #undef __FUNCT__
766 #define __FUNCT__ "PCASMSetOverlap_ASM"
767 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
768 {
769   PC_ASM *osm = (PC_ASM*)pc->data;
770 
771   PetscFunctionBegin;
772   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
773   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
774   if (!pc->setupcalled) osm->overlap = ovl;
775   PetscFunctionReturn(0);
776 }
777 
778 #undef __FUNCT__
779 #define __FUNCT__ "PCASMSetType_ASM"
780 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
781 {
782   PC_ASM *osm = (PC_ASM*)pc->data;
783 
784   PetscFunctionBegin;
785   osm->type     = type;
786   osm->type_set = PETSC_TRUE;
787   PetscFunctionReturn(0);
788 }
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "PCASMGetType_ASM"
792 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
793 {
794   PC_ASM *osm = (PC_ASM*)pc->data;
795 
796   PetscFunctionBegin;
797   *type = osm->type;
798   PetscFunctionReturn(0);
799 }
800 
801 #undef __FUNCT__
802 #define __FUNCT__ "PCASMSetLocalType_ASM"
803 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
804 {
805   PC_ASM *osm = (PC_ASM *) pc->data;
806 
807   PetscFunctionBegin;
808   osm->loctype = type;
809   PetscFunctionReturn(0);
810 }
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "PCASMGetLocalType_ASM"
814 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
815 {
816   PC_ASM *osm = (PC_ASM *) pc->data;
817 
818   PetscFunctionBegin;
819   *type = osm->loctype;
820   PetscFunctionReturn(0);
821 }
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "PCASMSetSortIndices_ASM"
825 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
826 {
827   PC_ASM *osm = (PC_ASM*)pc->data;
828 
829   PetscFunctionBegin;
830   osm->sort_indices = doSort;
831   PetscFunctionReturn(0);
832 }
833 
834 #undef __FUNCT__
835 #define __FUNCT__ "PCASMGetSubKSP_ASM"
/*
   PCASMGetSubKSP_ASM - Implementation of PCASMGetSubKSP() for PCASM. Returns the
   number of local blocks, the global index of the first local block, and/or the
   array of per-block KSP solvers. Collective when first_local is requested.
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* MPI_Scan() is an inclusive prefix sum; subtracting this rank's own count
       yields the total number of blocks on lower ranks, i.e. the global index
       of our first block.  All ranks must request first_local together. */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
857 
858 #undef __FUNCT__
859 #define __FUNCT__ "PCASMSetLocalSubdomains"
860 /*@C
861     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
862 
863     Collective on PC
864 
865     Input Parameters:
866 +   pc - the preconditioner context
867 .   n - the number of subdomains for this processor (default value = 1)
868 .   is - the index set that defines the subdomains for this processor
869          (or NULL for PETSc to determine subdomains)
870 -   is_local - the index sets that define the local part of the subdomains for this processor
871          (or NULL to use the default of 1 subdomain per process)
872 
873     Notes:
874     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
875 
876     By default the ASM preconditioner uses 1 block per processor.
877 
878     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
879 
880     Level: advanced
881 
882 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
883 
884 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
885           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
886 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMSetLocalSubdomains_ASM(); PetscTryMethod() is a no-op
     when pc is of a type that does not compose this method */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
896 
897 #undef __FUNCT__
898 #define __FUNCT__ "PCASMSetTotalSubdomains"
899 /*@C
900     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
901     additive Schwarz preconditioner.  Either all or no processors in the
902     PC communicator must call this routine, with the same index sets.
903 
904     Collective on PC
905 
906     Input Parameters:
907 +   pc - the preconditioner context
908 .   N  - the number of subdomains for all processors
909 .   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
911 -   is_local - the index sets that define the local part of the subdomains for this processor
912          (or NULL to use the default of 1 subdomain per process)
913 
914     Options Database Key:
915     To set the total number of subdomain blocks rather than specify the
916     index sets, use the option
917 .    -pc_asm_blocks <blks> - Sets total blocks
918 
919     Notes:
920     Currently you cannot use this to set the actual subdomains with the argument is.
921 
922     By default the ASM preconditioner uses 1 block per processor.
923 
924     These index sets cannot be destroyed until after completion of the
925     linear solves for which the ASM preconditioner is being used.
926 
927     Use PCASMSetLocalSubdomains() to set local subdomains.
928 
929     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
930 
931     Level: advanced
932 
933 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
934 
935 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
936           PCASMCreateSubdomains2D()
937 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMSetTotalSubdomains_ASM(); silently ignored for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
947 
948 #undef __FUNCT__
949 #define __FUNCT__ "PCASMSetOverlap"
950 /*@
951     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
952     additive Schwarz preconditioner.  Either all or no processors in the
953     PC communicator must call this routine. If MatIncreaseOverlap is used,
    use option -mat_increase_overlap when the problem size is large.
955 
956     Logically Collective on PC
957 
958     Input Parameters:
959 +   pc  - the preconditioner context
960 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
961 
962     Options Database Key:
963 .   -pc_asm_overlap <ovl> - Sets overlap
964 
965     Notes:
966     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
968     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
969 
970     The overlap defaults to 1, so if one desires that no additional
971     overlap be computed beyond what may have been set with a call to
972     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
973     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
975     internally by PETSc, and using an overlap of 0 would result in an ASM
976     variant that is equivalent to the block Jacobi preconditioner.
977 
978     Note that one can define initial index sets with any overlap via
979     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
980     PCASMSetOverlap() merely allows PETSc to extend that overlap further
981     if desired.
982 
983     Level: intermediate
984 
985 .keywords: PC, ASM, set, overlap
986 
987 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
988           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
989 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* ovl must match across the communicator (logically collective) */
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatches to PCASMSetOverlap_ASM(); silently ignored for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1000 
1001 #undef __FUNCT__
1002 #define __FUNCT__ "PCASMSetType"
1003 /*@
1004     PCASMSetType - Sets the type of restriction and interpolation used
1005     for local problems in the additive Schwarz method.
1006 
1007     Logically Collective on PC
1008 
1009     Input Parameters:
1010 +   pc  - the preconditioner context
1011 -   type - variant of ASM, one of
1012 .vb
1013       PC_ASM_BASIC       - full interpolation and restriction
1014       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1015       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1016       PC_ASM_NONE        - local processor restriction and interpolation
1017 .ve
1018 
1019     Options Database Key:
1020 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1021 
1022     Level: intermediate
1023 
1024 .keywords: PC, ASM, set, type
1025 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1027           PCASMCreateSubdomains2D()
1028 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* type must match across the communicator (logically collective) */
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatches to PCASMSetType_ASM(); silently ignored for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1039 
1040 #undef __FUNCT__
1041 #define __FUNCT__ "PCASMGetType"
1042 /*@
1043     PCASMGetType - Gets the type of restriction and interpolation used
1044     for local problems in the additive Schwarz method.
1045 
1046     Logically Collective on PC
1047 
1048     Input Parameter:
1049 .   pc  - the preconditioner context
1050 
1051     Output Parameter:
1052 .   type - variant of ASM, one of
1053 
1054 .vb
1055       PC_ASM_BASIC       - full interpolation and restriction
1056       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1057       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1058       PC_ASM_NONE        - local processor restriction and interpolation
1059 .ve
1060 
1061     Options Database Key:
1062 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1063 
1064     Level: intermediate
1065 
1066 .keywords: PC, ASM, set, type
1067 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1069           PCASMCreateSubdomains2D()
1070 @*/
1071 PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
1072 {
1073   PetscErrorCode ierr;
1074 
1075   PetscFunctionBegin;
1076   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1077   ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
1078   PetscFunctionReturn(0);
1079 }
1080 
1081 #undef __FUNCT__
1082 #define __FUNCT__ "PCASMSetLocalType"
1083 /*@
1084   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1085 
1086   Logically Collective on PC
1087 
1088   Input Parameters:
1089 + pc  - the preconditioner context
1090 - type - type of composition, one of
1091 .vb
1092   PC_COMPOSITE_ADDITIVE       - local additive combination
1093   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1094 .ve
1095 
1096   Options Database Key:
1097 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1098 
1099   Level: intermediate
1100 
1101 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
1102 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* type must match across the communicator (logically collective) */
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatches to PCASMSetLocalType_ASM(); silently ignored for other PC types */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1113 
1114 #undef __FUNCT__
1115 #define __FUNCT__ "PCASMGetLocalType"
1116 /*@
1117   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1118 
1119   Logically Collective on PC
1120 
1121   Input Parameter:
1122 . pc  - the preconditioner context
1123 
1124   Output Parameter:
1125 . type - type of composition, one of
1126 .vb
1127   PC_COMPOSITE_ADDITIVE       - local additive combination
1128   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1129 .ve
1130 
1131   Options Database Key:
1132 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1133 
1134   Level: intermediate
1135 
1136 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
1137 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* dispatches to PCASMGetLocalType_ASM(); PetscUseMethod() errors if pc is of
     a type that does not compose this method */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1148 
1149 #undef __FUNCT__
1150 #define __FUNCT__ "PCASMSetSortIndices"
1151 /*@
1152     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1153 
1154     Logically Collective on PC
1155 
1156     Input Parameters:
1157 +   pc  - the preconditioner context
1158 -   doSort - sort the subdomain indices
1159 
1160     Level: intermediate
1161 
1162 .keywords: PC, ASM, set, type
1163 
1164 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1165           PCASMCreateSubdomains2D()
1166 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* doSort must match across the communicator (logically collective) */
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatches to PCASMSetSortIndices_ASM(); silently ignored for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1177 
1178 #undef __FUNCT__
1179 #define __FUNCT__ "PCASMGetSubKSP"
1180 /*@C
1181    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1182    this processor.
1183 
1184    Collective on PC iff first_local is requested
1185 
1186    Input Parameter:
1187 .  pc - the preconditioner context
1188 
1189    Output Parameters:
1190 +  n_local - the number of blocks on this processor or NULL
1191 .  first_local - the global number of the first block on this processor or NULL,
1192                  all processors must request or all must pass NULL
1193 -  ksp - the array of KSP contexts
1194 
1195    Note:
   After PCASMGetSubKSP() the array of KSPs is not to be freed.
1197 
1198    Currently for some matrix implementations only 1 block per processor
1199    is supported.
1200 
1201    You must call KSPSetUp() before calling PCASMGetSubKSP().
1202 
1203    Fortran note:
1204    The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.
1205 
1206    Level: advanced
1207 
1208 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1209 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1211           PCASMCreateSubdomains2D(),
1212 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to PCASMGetSubKSP_ASM(); PetscUseMethod() errors if pc is of a
     type that does not compose this method */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1222 
1223 /* -------------------------------------------------------------------------------------*/
1224 /*MC
1225    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1226            its own KSP object.
1227 
1228    Options Database Keys:
1229 +  -pc_asm_blocks <blks> - Sets total blocks
1230 .  -pc_asm_overlap <ovl> - Sets overlap
1231 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1232 
1233      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1234       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1235       -pc_asm_type basic to use the standard ASM.
1236 
1237    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1238      than one processor. Defaults to one block per processor.
1239 
1240      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1241         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1242 
1243      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1244          and set the options directly on the resulting KSP object (you can access its PC
1245          with KSPGetPC())
1246 
1247 
1248    Level: beginner
1249 
1250    Concepts: additive Schwarz method
1251 
1252     References:
1253     An additive variant of the Schwarz alternating method for the case of many subregions
1254     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1255 
1256     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1257     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1258 
1259 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1260            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1261            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1262 
1263 M*/
1264 
1265 #undef __FUNCT__
1266 #define __FUNCT__ "PCCreate_ASM"
/*
   PCCreate_ASM - Creates the PC_ASM context, sets its defaults, installs the
   function table for this PC type, and composes the PCASM-specific methods
   that the public PCASMSet/Get wrappers dispatch to.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* defaults: subdomain count decided at setup, overlap of 1, restricted ASM,
     additive local composition, sorted subdomain indices */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* the composed-name strings below are the dispatch keys used by the public
     PCASMSet/Get wrappers; they must not change */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1318 
1319 #undef __FUNCT__
1320 #define __FUNCT__ "PCASMCreateSubdomains"
1321 /*@C
1322    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1324 
1325    Collective
1326 
1327    Input Parameters:
1328 +  A - The global matrix operator
1329 -  n - the number of local blocks
1330 
1331    Output Parameters:
1332 .  outis - the array of index sets defining the subdomains
1333 
1334    Level: advanced
1335 
1336    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1337     from these if you use PCASMSetLocalSubdomains()
1338 
1339     In the Fortran version you must provide the array outis[] already allocated of length n.
1340 
1341 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1342 
1343 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1344 @*/
1345 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1346 {
1347   MatPartitioning mpart;
1348   const char      *prefix;
1349   PetscErrorCode  (*f)(Mat,Mat*);
1350   PetscMPIInt     size;
1351   PetscInt        i,j,rstart,rend,bs;
1352   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1353   Mat             Ad     = NULL, adj;
1354   IS              ispart,isnumb,*is;
1355   PetscErrorCode  ierr;
1356 
1357   PetscFunctionBegin;
1358   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1359   PetscValidPointer(outis,3);
1360   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1361 
1362   /* Get prefix, row distribution, and block size */
1363   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1364   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1365   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1366   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1367 
1368   /* Get diagonal block from matrix if possible */
1369   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1370   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1371   if (f) {
1372     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1373   } else if (size == 1) {
1374     Ad = A;
1375   }
1376   if (Ad) {
1377     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1378     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1379   }
1380   if (Ad && n > 1) {
1381     PetscBool match,done;
1382     /* Try to setup a good matrix partitioning if available */
1383     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1384     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1385     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1386     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1387     if (!match) {
1388       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1389     }
1390     if (!match) { /* assume a "good" partitioner is available */
1391       PetscInt       na;
1392       const PetscInt *ia,*ja;
1393       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1394       if (done) {
1395         /* Build adjacency matrix by hand. Unfortunately a call to
1396            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1397            remove the block-aij structure and we cannot expect
1398            MatPartitioning to split vertices as we need */
1399         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1400         const PetscInt *row;
1401         nnz = 0;
1402         for (i=0; i<na; i++) { /* count number of nonzeros */
1403           len = ia[i+1] - ia[i];
1404           row = ja + ia[i];
1405           for (j=0; j<len; j++) {
1406             if (row[j] == i) { /* don't count diagonal */
1407               len--; break;
1408             }
1409           }
1410           nnz += len;
1411         }
1412         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1413         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1414         nnz    = 0;
1415         iia[0] = 0;
1416         for (i=0; i<na; i++) { /* fill adjacency */
1417           cnt = 0;
1418           len = ia[i+1] - ia[i];
1419           row = ja + ia[i];
1420           for (j=0; j<len; j++) {
1421             if (row[j] != i) { /* if not diagonal */
1422               jja[nnz+cnt++] = row[j];
1423             }
1424           }
1425           nnz     += cnt;
1426           iia[i+1] = nnz;
1427         }
1428         /* Partitioning of the adjacency matrix */
1429         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1430         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1431         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1432         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1433         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1434         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1435         foundpart = PETSC_TRUE;
1436       }
1437       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1438     }
1439     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1440   }
1441 
1442   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1443   *outis = is;
1444 
1445   if (!foundpart) {
1446 
1447     /* Partitioning by contiguous chunks of rows */
1448 
1449     PetscInt mbs   = (rend-rstart)/bs;
1450     PetscInt start = rstart;
1451     for (i=0; i<n; i++) {
1452       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1453       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1454       start += count;
1455     }
1456 
1457   } else {
1458 
1459     /* Partitioning by adjacency of diagonal block  */
1460 
1461     const PetscInt *numbering;
1462     PetscInt       *count,nidx,*indices,*newidx,start=0;
1463     /* Get node count in each partition */
1464     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1465     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1466     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1467       for (i=0; i<n; i++) count[i] *= bs;
1468     }
1469     /* Build indices from node numbering */
1470     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1471     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1472     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1473     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1474     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1475     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1476     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1477       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1478       for (i=0; i<nidx; i++) {
1479         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1480       }
1481       ierr    = PetscFree(indices);CHKERRQ(ierr);
1482       nidx   *= bs;
1483       indices = newidx;
1484     }
1485     /* Shift to get global indices */
1486     for (i=0; i<nidx; i++) indices[i] += rstart;
1487 
1488     /* Build the index sets for each block */
1489     for (i=0; i<n; i++) {
1490       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1491       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1492       start += count[i];
1493     }
1494 
1495     ierr = PetscFree(count);CHKERRQ(ierr);
1496     ierr = PetscFree(indices);CHKERRQ(ierr);
1497     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1498     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1499 
1500   }
1501   PetscFunctionReturn(0);
1502 }
1503 
1504 #undef __FUNCT__
1505 #define __FUNCT__ "PCASMDestroySubdomains"
1506 /*@C
1507    PCASMDestroySubdomains - Destroys the index sets created with
1508    PCASMCreateSubdomains(). Should be called after setting subdomains
1509    with PCASMSetLocalSubdomains().
1510 
1511    Collective
1512 
1513    Input Parameters:
1514 +  n - the number of index sets
1515 .  is - the array of index sets
1516 -  is_local - the array of local index sets, can be NULL
1517 
1518    Level: advanced
1519 
1520 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1521 
1522 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1523 @*/
1524 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1525 {
1526   PetscInt       i;
1527   PetscErrorCode ierr;
1528 
1529   PetscFunctionBegin;
1530   if (n <= 0) PetscFunctionReturn(0);
1531   if (is) {
1532     PetscValidPointer(is,2);
1533     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1534     ierr = PetscFree(is);CHKERRQ(ierr);
1535   }
1536   if (is_local) {
1537     PetscValidPointer(is_local,3);
1538     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1539     ierr = PetscFree(is_local);CHKERRQ(ierr);
1540   }
1541   PetscFunctionReturn(0);
1542 }
1543 
#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0; /* flat index of the current subdomain in (*is)/(*is_local) */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* Extend the subdomain's y-range by the overlap, clamped to the mesh */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* Extend the subdomain's x-range by the overlap, clamped to the mesh */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft); /* points in overlapped subdomain */
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* Collect the natural (row-major) indices of the overlapped rectangle */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* No overlap: the local IS is identical; share it via a new reference */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* Reuse idx (the non-overlapped rectangle is never larger than nidx)
           to build the indices of the non-overlapping local subdomain */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* Overlapped index sets were generated row-by-row; sort them for the callers */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1629 
1630 #undef __FUNCT__
1631 #define __FUNCT__ "PCASMGetLocalSubdomains"
1632 /*@C
1633     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1634     only) for the additive Schwarz preconditioner.
1635 
1636     Not Collective
1637 
1638     Input Parameter:
1639 .   pc - the preconditioner context
1640 
1641     Output Parameters:
1642 +   n - the number of subdomains for this processor (default value = 1)
1643 .   is - the index sets that define the subdomains for this processor
1644 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1645 
1646 
1647     Notes:
1648     The IS numbering is in the parallel, global numbering of the vector.
1649 
1650     Level: advanced
1651 
1652 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1653 
1654 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1655           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1656 @*/
1657 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1658 {
1659   PC_ASM         *osm;
1660   PetscErrorCode ierr;
1661   PetscBool      match;
1662 
1663   PetscFunctionBegin;
1664   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1665   PetscValidIntPointer(n,2);
1666   if (is) PetscValidPointer(is,3);
1667   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1668   if (!match) {
1669     if (n) *n = 0;
1670     if (is) *is = NULL;
1671   } else {
1672     osm = (PC_ASM*)pc->data;
1673     if (n) *n = osm->n_local_true;
1674     if (is) *is = osm->is;
1675     if (is_local) *is_local = osm->is_local;
1676   }
1677   PetscFunctionReturn(0);
1678 }
1679 
1680 #undef __FUNCT__
1681 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1682 /*@C
1683     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1684     only) for the additive Schwarz preconditioner.
1685 
1686     Not Collective
1687 
1688     Input Parameter:
1689 .   pc - the preconditioner context
1690 
1691     Output Parameters:
1692 +   n - the number of matrices for this processor (default value = 1)
1693 -   mat - the matrices
1694 
1695 
1696     Level: advanced
1697 
1698     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1699 
1700            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1701 
1702 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1703 
1704 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1705           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1706 @*/
1707 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1708 {
1709   PC_ASM         *osm;
1710   PetscErrorCode ierr;
1711   PetscBool      match;
1712 
1713   PetscFunctionBegin;
1714   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1715   PetscValidIntPointer(n,2);
1716   if (mat) PetscValidPointer(mat,3);
1717   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1718   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1719   if (!match) {
1720     if (n) *n = 0;
1721     if (mat) *mat = NULL;
1722   } else {
1723     osm = (PC_ASM*)pc->data;
1724     if (n) *n = osm->n_local_true;
1725     if (mat) *mat = osm->pmat;
1726   }
1727   PetscFunctionReturn(0);
1728 }
1729 
1730 #undef __FUNCT__
1731 #define __FUNCT__ "PCASMSetDMSubdomains"
1732 /*@
1733     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1734     Logically Collective
1735 
1736     Input Parameter:
1737 +   pc  - the preconditioner
1738 -   flg - boolean indicating whether to use subdomains defined by the DM
1739 
1740     Options Database Key:
1741 .   -pc_asm_dm_subdomains
1742 
1743     Level: intermediate
1744 
1745     Notes:
1746     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1747     so setting either of the first two effectively turns the latter off.
1748 
1749 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1750 
1751 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1752           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1753 @*/
1754 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1755 {
1756   PC_ASM         *osm = (PC_ASM*)pc->data;
1757   PetscErrorCode ierr;
1758   PetscBool      match;
1759 
1760   PetscFunctionBegin;
1761   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1762   PetscValidLogicalCollectiveBool(pc,flg,2);
1763   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1764   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1765   if (match) {
1766     osm->dm_subdomains = flg;
1767   }
1768   PetscFunctionReturn(0);
1769 }
1770 
1771 #undef __FUNCT__
1772 #define __FUNCT__ "PCASMGetDMSubdomains"
1773 /*@
1774     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1775     Not Collective
1776 
1777     Input Parameter:
1778 .   pc  - the preconditioner
1779 
1780     Output Parameter:
1781 .   flg - boolean indicating whether to use subdomains defined by the DM
1782 
1783     Level: intermediate
1784 
1785 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1786 
1787 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1788           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1789 @*/
1790 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1791 {
1792   PC_ASM         *osm = (PC_ASM*)pc->data;
1793   PetscErrorCode ierr;
1794   PetscBool      match;
1795 
1796   PetscFunctionBegin;
1797   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1798   PetscValidPointer(flg,2);
1799   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1800   if (match) {
1801     if (flg) *flg = osm->dm_subdomains;
1802   }
1803   PetscFunctionReturn(0);
1804 }
1805