xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision fa8916fe1eb763ac2d64c53b10fd6cf64d67acc1)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
typedef struct {
  PetscInt   n, n_local, n_local_true; /* n: total subdomains over all ranks; n_local: max over ranks of n_local_true; n_local_true: subdomains actually on this rank */
  PetscInt   overlap;             /* overlap requested by user; < 0 means user-defined overlap via the supplied index sets */
  KSP        *ksp;                /* linear solvers for each block, length n_local_true */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; NULL when is_local is NULL */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors: subdomain RHS, subdomain solution, and its non-overlapping part */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain submatrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves (additive or multiplicative) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors sized by lis, used to propagate corrections between blocks */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints a description of the ASM preconditioner to a viewer.

   Supports ASCII and string viewers.  For ASCII output: if all local solves
   are the same, only rank 0 views its first sub-KSP through a sub-viewer;
   otherwise every rank prints each of its local block solvers using
   synchronized output.  All viewer calls below are collective, so every rank
   must take the same branch structure.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* defaults shown when the user supplied the overlap/subdomains directly */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        /* Get/Restore of the sub-viewer is collective; only rank 0 actually views */
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers differ between blocks: every rank prints each of its blocks */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line summary for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 #undef __FUNCT__
101 #define __FUNCT__ "PCASMPrintSubdomains"
102 static PetscErrorCode PCASMPrintSubdomains(PC pc)
103 {
104   PC_ASM         *osm = (PC_ASM*)pc->data;
105   const char     *prefix;
106   char           fname[PETSC_MAX_PATH_LEN+1];
107   PetscViewer    viewer, sviewer;
108   char           *s;
109   PetscInt       i,j,nidx;
110   const PetscInt *idx;
111   PetscMPIInt    rank, size;
112   PetscErrorCode ierr;
113 
114   PetscFunctionBegin;
115   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
116   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
117   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
118   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
119   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
120   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
121   for (i=0; i<osm->n_local; i++) {
122     if (i < osm->n_local_true) {
123       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
124       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
125       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
126       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
127       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
128       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
129       for (j=0; j<nidx; j++) {
130         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
131       }
132       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
133       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
134       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
137       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
138       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
139       ierr = PetscFree(s);CHKERRQ(ierr);
140       if (osm->is_local) {
141         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
142         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
144         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
145         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
146         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
147         for (j=0; j<nidx; j++) {
148           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
149         }
150         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
151         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
152         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
153         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
155         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
156         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
157         ierr = PetscFree(s);CHKERRQ(ierr);
158       }
159     } else {
160       /* Participate in collective viewer calls. */
161       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
162       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
164       /* Assume either all ranks have is_local or none do. */
165       if (osm->is_local) {
166         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
167         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
169       }
170     }
171   }
172   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
173   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
174   PetscFunctionReturn(0);
175 }
176 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds the ASM data structures.

   On the first call (pc->setupcalled == 0) this:
     1. Determines the subdomains: user-supplied index sets, a DM
        decomposition (when allowed), or one subdomain per process.
     2. Reduces to get the global count osm->n and the per-process maximum
        osm->n_local (every rank loops to n_local so collective scatter
        creation stays matched even with uneven subdomain counts).
     3. Optionally snapshots non-overlapping copies, prints the subdomains,
        grows the overlap with MatIncreaseOverlap(), and sorts the indices.
     4. Creates per-subdomain work vectors plus the restriction,
        localization, and prolongation scatters.
     5. Creates one sequential sub-KSP per true local subdomain.
   On subsequent calls it only refreshes the extracted submatrices, rebuilding
   them from scratch when the nonzero pattern changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* for known-symmetric problems the reduced variants give no benefit; use basic ASM */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took references; drop ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* snapshot the non-overlapping subdomains before osm->is is grown below */
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      /* restriction: global vec entries of is[i] -> contiguous local work vector */
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* translate is_local's global indices into positions within is[i] */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        /* localization: overlapping solution -> its non-overlapping part */
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        /* prolongation: non-overlapping part -> global vector (used in reverse) */
        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* no non-overlapping sets: share y and the restriction scatter */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* empty padding subdomains so collective scatter creation stays matched across ranks */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;

      /* union of all local overlapping subdomains, used for the lx/ly correction vectors */
      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* extract rectangular coupling blocks is[i] x lis used to update later RHSs */
    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
416 
417 #undef __FUNCT__
418 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
419 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
420 {
421   PC_ASM             *osm = (PC_ASM*)pc->data;
422   PetscErrorCode     ierr;
423   PetscInt           i;
424   KSPConvergedReason reason;
425 
426   PetscFunctionBegin;
427   for (i=0; i<osm->n_local_true; i++) {
428     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
429     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
430     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
431       pc->failedreason = PC_SUBPC_ERROR;
432     }
433   }
434   PetscFunctionReturn(0);
435 }
436 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

   For PC_COMPOSITE_ADDITIVE all subdomain solves are independent: restriction
   scatters are started for every slot (including the empty padding slots up
   to n_local so collective scatters stay matched across ranks), each block is
   solved, and the results are summed into y via the prolongation scatters.
   Begin/End pairs are deliberately interleaved to overlap communication with
   the local solves.

   For PC_COMPOSITE_MULTIPLICATIVE blocks are processed in sequence; after
   each solve the correction is propagated through osm->lmats so the next
   block's right-hand side accounts for it.

   The restrict/interpolate variants (PC_ASM_RESTRICT / PC_ASM_INTERPOLATE
   bits absent from osm->type) are implemented by switching the corresponding
   scatters to their LOCAL modes, which skip off-process communication.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* drop the overlapped part of the solution before summing into y */
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs: pull the accumulated correction (held in lx) into this block's work vector */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* form -A_{i+1,lis} * correction and stash it in lx for the next block's rhs update */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
528 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner:
   y = M^{-T} x (additive composition only).

   Structure mirrors the additive branch of PCApply_ASM(), but each block is
   solved with KSPSolveTranspose() and the RESTRICT/INTERPOLATE localization
   tests are swapped, since transposing reverses the roles of the restriction
   and interpolation operators.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* start all restrictions (including empty padding slots) to overlap communication */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "PCReset_ASM"
/*
  PCReset_ASM - Frees everything built during setup (subsolver internal data,
  subdomain matrices, scatters, work vectors, subdomain index sets) so the PC
  can be set up afresh.  User-settable options (overlap, type, loctype, ...)
  are retained.
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* Reset (do not destroy) the per-block solvers; the KSP objects themselves
     are freed only in PCDestroy_ASM() */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* Destroy the array of subdomain preconditioning matrices (one per true local block) */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* Scatters and work vectors have n_local entries (the max over all
     processors), not just n_local_true -- see the note at the top of this file */
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      /* the localization scatters may never have been created, hence the guard */
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  /* Extra objects used only by the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
627 
628 #undef __FUNCT__
629 #define __FUNCT__ "PCDestroy_ASM"
630 static PetscErrorCode PCDestroy_ASM(PC pc)
631 {
632   PC_ASM         *osm = (PC_ASM*)pc->data;
633   PetscErrorCode ierr;
634   PetscInt       i;
635 
636   PetscFunctionBegin;
637   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
638   if (osm->ksp) {
639     for (i=0; i<osm->n_local_true; i++) {
640       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
641     }
642     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
643   }
644   ierr = PetscFree(pc->data);CHKERRQ(ierr);
645   PetscFunctionReturn(0);
646 }
647 
648 #undef __FUNCT__
649 #define __FUNCT__ "PCSetFromOptions_ASM"
/*
  PCSetFromOptions_ASM - Processes options database entries for PCASM.  Each
  recognized option is forwarded to the corresponding PCASMSetXXX() routine;
  see their manual pages for the meaning of the values.
*/
static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric (only when the user has
     not chosen a type explicitly; see PCASMSetType_ASM(), which sets type_set) */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    /* an explicit block count overrides DM-defined subdomains */
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    /* likewise, an explicit overlap disables DM-defined subdomains */
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
686 
687 /*------------------------------------------------------------------------------------*/
688 
689 #undef __FUNCT__
690 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
  PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains() for
  PCASM: records the n overlapping (is) and, optionally, nonoverlapping
  (is_local) subdomain index sets for this process, taking a reference to each.
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  /* After setup, only a call that changes nothing (same n, no new sets) is permitted */
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming sets BEFORE destroying the stored ones, so the
       call is safe even if the caller passes (some of) the sets already held here */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* Only nonoverlapping sets were given: derive the overlapping sets from them */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            /* NOTE(review): ISDuplicate() appears to already copy the indices,
               which would make this ISCopy() redundant -- confirm against the IS API */
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* No overlap will be added, so the two sets can share one IS */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
738 
739 #undef __FUNCT__
740 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
/*
  PCASMSetTotalSubdomains_ASM - Implementation of PCASMSetTotalSubdomains() for
  PCASM: divides a global count of N subdomains as evenly as possible among the
  processes of the PC's communicator.  Explicit index sets are not supported
  here; use PCASMSetLocalSubdomains() for that.
*/
static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  /* N/size blocks per process, with the first N%size ranks taking one extra */
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    /* Drop any previously stored index sets; only the local block count is kept */
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}
769 
770 #undef __FUNCT__
771 #define __FUNCT__ "PCASMSetOverlap_ASM"
772 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
773 {
774   PC_ASM *osm = (PC_ASM*)pc->data;
775 
776   PetscFunctionBegin;
777   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
778   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
779   if (!pc->setupcalled) osm->overlap = ovl;
780   PetscFunctionReturn(0);
781 }
782 
783 #undef __FUNCT__
784 #define __FUNCT__ "PCASMSetType_ASM"
785 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
786 {
787   PC_ASM *osm = (PC_ASM*)pc->data;
788 
789   PetscFunctionBegin;
790   osm->type     = type;
791   osm->type_set = PETSC_TRUE;
792   PetscFunctionReturn(0);
793 }
794 
795 #undef __FUNCT__
796 #define __FUNCT__ "PCASMGetType_ASM"
797 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
798 {
799   PC_ASM *osm = (PC_ASM*)pc->data;
800 
801   PetscFunctionBegin;
802   *type = osm->type;
803   PetscFunctionReturn(0);
804 }
805 
806 #undef __FUNCT__
807 #define __FUNCT__ "PCASMSetLocalType_ASM"
808 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
809 {
810   PC_ASM *osm = (PC_ASM *) pc->data;
811 
812   PetscFunctionBegin;
813   osm->loctype = type;
814   PetscFunctionReturn(0);
815 }
816 
817 #undef __FUNCT__
818 #define __FUNCT__ "PCASMGetLocalType_ASM"
819 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
820 {
821   PC_ASM *osm = (PC_ASM *) pc->data;
822 
823   PetscFunctionBegin;
824   *type = osm->loctype;
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "PCASMSetSortIndices_ASM"
830 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
831 {
832   PC_ASM *osm = (PC_ASM*)pc->data;
833 
834   PetscFunctionBegin;
835   osm->sort_indices = doSort;
836   PetscFunctionReturn(0);
837 }
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "PCASMGetSubKSP_ASM"
/*
  PCASMGetSubKSP_ASM - Implementation of PCASMGetSubKSP() for PCASM: returns
  the array of per-block KSP solvers and, optionally, this process's block
  count and the global index of its first block.
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* n_local_true is only valid (>= 1) after PCSetUp_ASM() has run */
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* Inclusive prefix sum of the local block counts; subtracting our own
       count turns it into the global index of this process's first block.
       MPI_Scan is collective, so either all ranks request first_local or none do. */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "PCASMSetLocalSubdomains"
865 /*@C
866     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
867 
868     Collective on PC
869 
870     Input Parameters:
871 +   pc - the preconditioner context
872 .   n - the number of subdomains for this processor (default value = 1)
873 .   is - the index set that defines the subdomains for this processor
874          (or NULL for PETSc to determine subdomains)
875 -   is_local - the index sets that define the local part of the subdomains for this processor
876          (or NULL to use the default of 1 subdomain per process)
877 
878     Notes:
879     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
880 
881     By default the ASM preconditioner uses 1 block per processor.
882 
883     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
884 
885     Level: advanced
886 
887 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
888 
889 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
890           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
891 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCASMSetLocalSubdomains_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
901 
902 #undef __FUNCT__
903 #define __FUNCT__ "PCASMSetTotalSubdomains"
904 /*@C
905     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
906     additive Schwarz preconditioner.  Either all or no processors in the
907     PC communicator must call this routine, with the same index sets.
908 
909     Collective on PC
910 
911     Input Parameters:
912 +   pc - the preconditioner context
913 .   N  - the number of subdomains for all processors
914 .   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
916 -   is_local - the index sets that define the local part of the subdomains for this processor
917          (or NULL to use the default of 1 subdomain per process)
918 
919     Options Database Key:
920     To set the total number of subdomain blocks rather than specify the
921     index sets, use the option
922 .    -pc_asm_blocks <blks> - Sets total blocks
923 
924     Notes:
925     Currently you cannot use this to set the actual subdomains with the argument is.
926 
927     By default the ASM preconditioner uses 1 block per processor.
928 
929     These index sets cannot be destroyed until after completion of the
930     linear solves for which the ASM preconditioner is being used.
931 
932     Use PCASMSetLocalSubdomains() to set local subdomains.
933 
934     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
935 
936     Level: advanced
937 
938 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
939 
940 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
941           PCASMCreateSubdomains2D()
942 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatches to PCASMSetTotalSubdomains_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
952 
953 #undef __FUNCT__
954 #define __FUNCT__ "PCASMSetOverlap"
955 /*@
956     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
957     additive Schwarz preconditioner.  Either all or no processors in the
958     PC communicator must call this routine. If MatIncreaseOverlap is used,
    use option -mat_increase_overlap when the problem size is large.
960 
961     Logically Collective on PC
962 
963     Input Parameters:
964 +   pc  - the preconditioner context
965 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
966 
967     Options Database Key:
968 .   -pc_asm_overlap <ovl> - Sets overlap
969 
970     Notes:
971     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
973     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
974 
975     The overlap defaults to 1, so if one desires that no additional
976     overlap be computed beyond what may have been set with a call to
977     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
978     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
980     internally by PETSc, and using an overlap of 0 would result in an ASM
981     variant that is equivalent to the block Jacobi preconditioner.
982 
983     Note that one can define initial index sets with any overlap via
984     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
985     PCASMSetOverlap() merely allows PETSc to extend that overlap further
986     if desired.
987 
988     Level: intermediate
989 
990 .keywords: PC, ASM, set, overlap
991 
992 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
993           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
994 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatches to PCASMSetOverlap_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1005 
1006 #undef __FUNCT__
1007 #define __FUNCT__ "PCASMSetType"
1008 /*@
1009     PCASMSetType - Sets the type of restriction and interpolation used
1010     for local problems in the additive Schwarz method.
1011 
1012     Logically Collective on PC
1013 
1014     Input Parameters:
1015 +   pc  - the preconditioner context
1016 -   type - variant of ASM, one of
1017 .vb
1018       PC_ASM_BASIC       - full interpolation and restriction
1019       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1020       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1021       PC_ASM_NONE        - local processor restriction and interpolation
1022 .ve
1023 
1024     Options Database Key:
1025 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1026 
1027     Level: intermediate
1028 
1029 .keywords: PC, ASM, set, type
1030 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1032           PCASMCreateSubdomains2D()
1033 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatches to PCASMSetType_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1044 
1045 #undef __FUNCT__
1046 #define __FUNCT__ "PCASMGetType"
1047 /*@
1048     PCASMGetType - Gets the type of restriction and interpolation used
1049     for local problems in the additive Schwarz method.
1050 
1051     Logically Collective on PC
1052 
1053     Input Parameter:
1054 .   pc  - the preconditioner context
1055 
1056     Output Parameter:
1057 .   type - variant of ASM, one of
1058 
1059 .vb
1060       PC_ASM_BASIC       - full interpolation and restriction
1061       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1062       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1063       PC_ASM_NONE        - local processor restriction and interpolation
1064 .ve
1065 
1066     Options Database Key:
1067 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1068 
1069     Level: intermediate
1070 
1071 .keywords: PC, ASM, set, type
1072 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1074           PCASMCreateSubdomains2D()
1075 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() errors if the PC type does not provide PCASMGetType_C */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1085 
1086 #undef __FUNCT__
1087 #define __FUNCT__ "PCASMSetLocalType"
1088 /*@
1089   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1090 
1091   Logically Collective on PC
1092 
1093   Input Parameters:
1094 + pc  - the preconditioner context
1095 - type - type of composition, one of
1096 .vb
1097   PC_COMPOSITE_ADDITIVE       - local additive combination
1098   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1099 .ve
1100 
1101   Options Database Key:
1102 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1103 
1104   Level: intermediate
1105 
1106 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
1107 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Dispatches to PCASMSetLocalType_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1118 
1119 #undef __FUNCT__
1120 #define __FUNCT__ "PCASMGetLocalType"
1121 /*@
1122   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1123 
1124   Logically Collective on PC
1125 
1126   Input Parameter:
1127 . pc  - the preconditioner context
1128 
1129   Output Parameter:
1130 . type - type of composition, one of
1131 .vb
1132   PC_COMPOSITE_ADDITIVE       - local additive combination
1133   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1134 .ve
1135 
1136   Options Database Key:
1137 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1138 
1139   Level: intermediate
1140 
1141 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
1142 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod() errors if the PC type does not provide PCASMGetLocalType_C */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1153 
1154 #undef __FUNCT__
1155 #define __FUNCT__ "PCASMSetSortIndices"
1156 /*@
1157     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1158 
1159     Logically Collective on PC
1160 
1161     Input Parameters:
1162 +   pc  - the preconditioner context
1163 -   doSort - sort the subdomain indices
1164 
1165     Level: intermediate
1166 
1167 .keywords: PC, ASM, set, type
1168 
1169 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1170           PCASMCreateSubdomains2D()
1171 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatches to PCASMSetSortIndices_ASM() when pc is of type PCASM; a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1182 
1183 #undef __FUNCT__
1184 #define __FUNCT__ "PCASMGetSubKSP"
1185 /*@C
1186    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1187    this processor.
1188 
1189    Collective on PC iff first_local is requested
1190 
1191    Input Parameter:
1192 .  pc - the preconditioner context
1193 
1194    Output Parameters:
1195 +  n_local - the number of blocks on this processor or NULL
1196 .  first_local - the global number of the first block on this processor or NULL,
1197                  all processors must request or all must pass NULL
1198 -  ksp - the array of KSP contexts
1199 
1200    Note:
1201    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1202 
1203    Currently for some matrix implementations only 1 block per processor
1204    is supported.
1205 
1206    You must call KSPSetUp() before calling PCASMGetSubKSP().
1207 
1208    Fortran note:
1209    The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.
1210 
1211    Level: advanced
1212 
1213 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1214 
.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1216           PCASMCreateSubdomains2D(),
1217 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod() errors if the PC type does not provide PCASMGetSubKSP_C */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1227 
1228 /* -------------------------------------------------------------------------------------*/
1229 /*MC
1230    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1231            its own KSP object.
1232 
1233    Options Database Keys:
1234 +  -pc_asm_blocks <blks> - Sets total blocks
1235 .  -pc_asm_overlap <ovl> - Sets overlap
1236 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1237 
1238      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1239       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1240       -pc_asm_type basic to use the standard ASM.
1241 
1242    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1243      than one processor. Defaults to one block per processor.
1244 
1245      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1246         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1247 
1248      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1249          and set the options directly on the resulting KSP object (you can access its PC
1250          with KSPGetPC())
1251 
1252 
1253    Level: beginner
1254 
1255    Concepts: additive Schwarz method
1256 
1257     References:
1258 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1259      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1261     Cambridge University Press.
1262 
1263 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1264            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1265            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1266 
1267 M*/
1268 
1269 #undef __FUNCT__
1270 #define __FUNCT__ "PCCreate_ASM"
/*
  PCCreate_ASM - Creates the PC_ASM context, fills in default parameter values,
  installs the PCASM function table on the PC, and composes the
  PCASMSetXXX_ASM()/PCASMGetXXX_ASM() implementations so the generic
  PCASMSetXXX()/PCASMGetXXX() wrappers can reach them.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* Defaults: subdomain count decided later (PETSC_DECIDE), overlap of 1,
     restricted ASM with additive local composition, sorted indices */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;

  /* Install the PCASM implementations of the generic PC operations */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* Compose the type-specific setters/getters used by the public wrappers */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1322 
1323 #undef __FUNCT__
1324 #define __FUNCT__ "PCASMCreateSubdomains"
1325 /*@C
1326    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1328 
1329    Collective
1330 
1331    Input Parameters:
1332 +  A - The global matrix operator
1333 -  n - the number of local blocks
1334 
1335    Output Parameters:
1336 .  outis - the array of index sets defining the subdomains
1337 
1338    Level: advanced
1339 
1340    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1341     from these if you use PCASMSetLocalSubdomains()
1342 
1343     In the Fortran version you must provide the array outis[] already allocated of length n.
1344 
1345 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1346 
1347 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1348 @*/
1349 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1350 {
1351   MatPartitioning mpart;
1352   const char      *prefix;
1353   PetscErrorCode  (*f)(Mat,Mat*);
1354   PetscMPIInt     size;
1355   PetscInt        i,j,rstart,rend,bs;
1356   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1357   Mat             Ad     = NULL, adj;
1358   IS              ispart,isnumb,*is;
1359   PetscErrorCode  ierr;
1360 
1361   PetscFunctionBegin;
1362   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1363   PetscValidPointer(outis,3);
1364   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1365 
1366   /* Get prefix, row distribution, and block size */
1367   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1368   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1369   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1370   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1371 
1372   /* Get diagonal block from matrix if possible */
1373   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1374   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1375   if (f) {
1376     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1377   } else if (size == 1) {
1378     Ad = A;
1379   }
1380   if (Ad) {
1381     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1382     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1383   }
1384   if (Ad && n > 1) {
1385     PetscBool match,done;
1386     /* Try to setup a good matrix partitioning if available */
1387     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1388     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1389     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1390     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1391     if (!match) {
1392       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1393     }
1394     if (!match) { /* assume a "good" partitioner is available */
1395       PetscInt       na;
1396       const PetscInt *ia,*ja;
1397       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1398       if (done) {
1399         /* Build adjacency matrix by hand. Unfortunately a call to
1400            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1401            remove the block-aij structure and we cannot expect
1402            MatPartitioning to split vertices as we need */
1403         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1404         const PetscInt *row;
1405         nnz = 0;
1406         for (i=0; i<na; i++) { /* count number of nonzeros */
1407           len = ia[i+1] - ia[i];
1408           row = ja + ia[i];
1409           for (j=0; j<len; j++) {
1410             if (row[j] == i) { /* don't count diagonal */
1411               len--; break;
1412             }
1413           }
1414           nnz += len;
1415         }
1416         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1417         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1418         nnz    = 0;
1419         iia[0] = 0;
1420         for (i=0; i<na; i++) { /* fill adjacency */
1421           cnt = 0;
1422           len = ia[i+1] - ia[i];
1423           row = ja + ia[i];
1424           for (j=0; j<len; j++) {
1425             if (row[j] != i) { /* if not diagonal */
1426               jja[nnz+cnt++] = row[j];
1427             }
1428           }
1429           nnz     += cnt;
1430           iia[i+1] = nnz;
1431         }
1432         /* Partitioning of the adjacency matrix */
1433         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1434         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1435         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1436         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1437         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1438         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1439         foundpart = PETSC_TRUE;
1440       }
1441       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1442     }
1443     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1444   }
1445 
1446   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1447   *outis = is;
1448 
1449   if (!foundpart) {
1450 
1451     /* Partitioning by contiguous chunks of rows */
1452 
1453     PetscInt mbs   = (rend-rstart)/bs;
1454     PetscInt start = rstart;
1455     for (i=0; i<n; i++) {
1456       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1457       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1458       start += count;
1459     }
1460 
1461   } else {
1462 
1463     /* Partitioning by adjacency of diagonal block  */
1464 
1465     const PetscInt *numbering;
1466     PetscInt       *count,nidx,*indices,*newidx,start=0;
1467     /* Get node count in each partition */
1468     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1469     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1470     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1471       for (i=0; i<n; i++) count[i] *= bs;
1472     }
1473     /* Build indices from node numbering */
1474     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1475     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1476     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1477     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1478     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1479     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1480     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1481       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1482       for (i=0; i<nidx; i++) {
1483         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1484       }
1485       ierr    = PetscFree(indices);CHKERRQ(ierr);
1486       nidx   *= bs;
1487       indices = newidx;
1488     }
1489     /* Shift to get global indices */
1490     for (i=0; i<nidx; i++) indices[i] += rstart;
1491 
1492     /* Build the index sets for each block */
1493     for (i=0; i<n; i++) {
1494       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1495       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1496       start += count[i];
1497     }
1498 
1499     ierr = PetscFree(count);CHKERRQ(ierr);
1500     ierr = PetscFree(indices);CHKERRQ(ierr);
1501     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1502     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1503 
1504   }
1505   PetscFunctionReturn(0);
1506 }
1507 
1508 #undef __FUNCT__
1509 #define __FUNCT__ "PCASMDestroySubdomains"
1510 /*@C
1511    PCASMDestroySubdomains - Destroys the index sets created with
1512    PCASMCreateSubdomains(). Should be called after setting subdomains
1513    with PCASMSetLocalSubdomains().
1514 
1515    Collective
1516 
1517    Input Parameters:
1518 +  n - the number of index sets
1519 .  is - the array of index sets
1520 -  is_local - the array of local index sets, can be NULL
1521 
1522    Level: advanced
1523 
1524 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1525 
1526 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1527 @*/
1528 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1529 {
1530   PetscInt       i;
1531   PetscErrorCode ierr;
1532 
1533   PetscFunctionBegin;
1534   if (n <= 0) PetscFunctionReturn(0);
1535   if (is) {
1536     PetscValidPointer(is,2);
1537     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1538     ierr = PetscFree(is);CHKERRQ(ierr);
1539   }
1540   if (is_local) {
1541     PetscValidPointer(is_local,3);
1542     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1543     ierr = PetscFree(is_local);CHKERRQ(ierr);
1544   }
1545   PetscFunctionReturn(0);
1546 }
1547 
#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," "); /* only one degree of freedom per node is supported */

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  /* Sweep the N x M array of subdomains row by row; loc_outer is the flattened subdomain index */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain; the first n%N rows get one extra mesh line */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;            /* extend by overlap, clipped at the bottom mesh boundary */
    yright = ystart + height + overlap; if (yright > n) yright = n; /* extend by overlap, clipped at the top mesh boundary */
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain; the first m%M columns get one extra mesh line */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;           /* clipped at the left mesh boundary */
      xright = xstart + width + overlap; if (xright > m) xright = m; /* clipped at the right mesh boundary */
      nidx   = (xright - xleft)*(yright - yleft); /* node count of the overlapping subdomain */
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* gather natural (row-major) grid indices of the overlapping subdomain */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* no overlap: the local subdomain equals the overlapping one, so share the IS (extra reference) */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* refill idx with only the non-overlapping interior; loc <= nidx, so idx is large enough */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* index sets were built in scan order; sort them as ASM expects sorted indices */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1633 
1634 #undef __FUNCT__
1635 #define __FUNCT__ "PCASMGetLocalSubdomains"
1636 /*@C
1637     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1638     only) for the additive Schwarz preconditioner.
1639 
1640     Not Collective
1641 
1642     Input Parameter:
1643 .   pc - the preconditioner context
1644 
1645     Output Parameters:
1646 +   n - the number of subdomains for this processor (default value = 1)
1647 .   is - the index sets that define the subdomains for this processor
1648 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1649 
1650 
1651     Notes:
1652     The IS numbering is in the parallel, global numbering of the vector.
1653 
1654     Level: advanced
1655 
1656 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1657 
1658 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1659           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1660 @*/
1661 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1662 {
1663   PC_ASM         *osm = (PC_ASM*)pc->data;
1664   PetscErrorCode ierr;
1665   PetscBool      match;
1666 
1667   PetscFunctionBegin;
1668   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1669   PetscValidIntPointer(n,2);
1670   if (is) PetscValidPointer(is,3);
1671   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1672   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1673   if (n) *n = osm->n_local_true;
1674   if (is) *is = osm->is;
1675   if (is_local) *is_local = osm->is_local;
1676   PetscFunctionReturn(0);
1677 }
1678 
1679 #undef __FUNCT__
1680 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1681 /*@C
1682     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1683     only) for the additive Schwarz preconditioner.
1684 
1685     Not Collective
1686 
1687     Input Parameter:
1688 .   pc - the preconditioner context
1689 
1690     Output Parameters:
1691 +   n - the number of matrices for this processor (default value = 1)
1692 -   mat - the matrices
1693 
1694 
1695     Level: advanced
1696 
1697     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1698 
1699            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1700 
1701 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1702 
1703 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1704           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1705 @*/
1706 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1707 {
1708   PC_ASM         *osm;
1709   PetscErrorCode ierr;
1710   PetscBool      match;
1711 
1712   PetscFunctionBegin;
1713   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1714   PetscValidIntPointer(n,2);
1715   if (mat) PetscValidPointer(mat,3);
1716   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1717   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1718   if (!match) {
1719     if (n) *n = 0;
1720     if (mat) *mat = NULL;
1721   } else {
1722     osm = (PC_ASM*)pc->data;
1723     if (n) *n = osm->n_local_true;
1724     if (mat) *mat = osm->pmat;
1725   }
1726   PetscFunctionReturn(0);
1727 }
1728 
1729 #undef __FUNCT__
1730 #define __FUNCT__ "PCASMSetDMSubdomains"
1731 /*@
1732     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1733     Logically Collective
1734 
1735     Input Parameter:
1736 +   pc  - the preconditioner
1737 -   flg - boolean indicating whether to use subdomains defined by the DM
1738 
1739     Options Database Key:
1740 .   -pc_asm_dm_subdomains
1741 
1742     Level: intermediate
1743 
1744     Notes:
1745     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1746     so setting either of the first two effectively turns the latter off.
1747 
1748 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1749 
1750 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1751           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1752 @*/
1753 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1754 {
1755   PC_ASM         *osm = (PC_ASM*)pc->data;
1756   PetscErrorCode ierr;
1757   PetscBool      match;
1758 
1759   PetscFunctionBegin;
1760   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1761   PetscValidLogicalCollectiveBool(pc,flg,2);
1762   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1763   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1764   if (match) {
1765     osm->dm_subdomains = flg;
1766   }
1767   PetscFunctionReturn(0);
1768 }
1769 
1770 #undef __FUNCT__
1771 #define __FUNCT__ "PCASMGetDMSubdomains"
1772 /*@
1773     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1774     Not Collective
1775 
1776     Input Parameter:
1777 .   pc  - the preconditioner
1778 
1779     Output Parameter:
1780 .   flg - boolean indicating whether to use subdomains defined by the DM
1781 
1782     Level: intermediate
1783 
1784 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1785 
1786 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1787           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1788 @*/
1789 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1790 {
1791   PC_ASM         *osm = (PC_ASM*)pc->data;
1792   PetscErrorCode ierr;
1793   PetscBool      match;
1794 
1795   PetscFunctionBegin;
1796   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1797   PetscValidPointer(flg,2);
1798   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1799   if (match) {
1800     if (flg) *flg = osm->dm_subdomains;
1801   }
1802   PetscFunctionReturn(0);
1803 }
1804