xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision dec1416f15364d8a66cef6f4b2a5a2aba5192d13)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
13 #include <petscdm.h>
14 
/* Private context for the additive Schwarz (ASM) preconditioner.
   See the note at the top of this file for the n / n_local / n_local_true
   naming convention. */
typedef struct {
  PetscInt   n, n_local, n_local_true; /* n: total subdomains over all ranks; n_local: max over ranks of n_local_true; n_local_true: subdomains on this rank */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter restriction;         /* mapping from global to overlapping (process) subdomain*/
  VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain */
  VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */
  Vec        lx, ly;              /* work vectors */
  Vec        *x,*y;               /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
/*
  PCView_ASM - Viewer routine for the ASM preconditioner: prints the
  subdomain counts, overlap amount, restriction/interpolation type, and the
  local KSP solver(s).

  Handles ASCII and string viewers; any other viewer type is silently
  ignored.  The ASCII path uses synchronized/sub-viewer calls, which are
  collective, so all ranks must execute the same sequence of viewer calls.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Default texts are overwritten below once the values are actually known */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share one solver configuration: print only rank 0's first KSP.
         GetSubViewer/RestoreSubViewer are still called on every rank (collective). */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers may differ per block: every rank prints each of its local block solvers */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
98 
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer, sviewer;
105   char           *s;
106   PetscInt       i,j,nidx;
107   const PetscInt *idx;
108   PetscMPIInt    rank, size;
109   PetscErrorCode ierr;
110 
111   PetscFunctionBegin;
112   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
113   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
114   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
115   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
116   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
117   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
118   for (i=0; i<osm->n_local; i++) {
119     if (i < osm->n_local_true) {
120       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
121       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
122       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
123 #define len  16*(nidx+1)+512
124       ierr = PetscMalloc1(len,&s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
126 #undef len
127       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
128       for (j=0; j<nidx; j++) {
129         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
130       }
131       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
132       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
133       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
136       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
137       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
138       ierr = PetscFree(s);CHKERRQ(ierr);
139       if (osm->is_local) {
140         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
141 #define len  16*(nidx+1)+512
142         ierr = PetscMalloc1(len, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
144 #undef len
145         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
146         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
147         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
148         for (j=0; j<nidx; j++) {
149           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
150         }
151         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
152         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
153         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
156         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
157         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
158         ierr = PetscFree(s);CHKERRQ(ierr);
159       }
160     } else {
161       /* Participate in collective viewer calls. */
162       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
164       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
165       /* Assume either all ranks have is_local or none do. */
166       if (osm->is_local) {
167         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
169         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
170       }
171     }
172   }
173   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
174   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
175   PetscFunctionReturn(0);
176 }
177 
178 static PetscErrorCode PCSetUp_ASM(PC pc)
179 {
180   PC_ASM         *osm = (PC_ASM*)pc->data;
181   PetscErrorCode ierr;
182   PetscBool      symset,flg;
183   PetscInt       i,m,m_local;
184   MatReuse       scall = MAT_REUSE_MATRIX;
185   IS             isl;
186   KSP            ksp;
187   PC             subpc;
188   const char     *prefix,*pprefix;
189   Vec            vec;
190   DM             *domain_dm = NULL;
191 
192   PetscFunctionBegin;
193   if (!pc->setupcalled) {
194     PetscInt m;
195 
196     if (!osm->type_set) {
197       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
198       if (symset && flg) osm->type = PC_ASM_BASIC;
199     }
200 
201     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
202     if (osm->n_local_true == PETSC_DECIDE) {
203       /* no subdomains given */
204       /* try pc->dm first, if allowed */
205       if (osm->dm_subdomains && pc->dm) {
206         PetscInt  num_domains, d;
207         char      **domain_names;
208         IS        *inner_domain_is, *outer_domain_is;
209         ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
210         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
211                               A future improvement of this code might allow one to use
212                               DM-defined subdomains and also increase the overlap,
213                               but that is not currently supported */
214         if (num_domains) {
215           ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
216         }
217         for (d = 0; d < num_domains; ++d) {
218           if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
219           if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
220           if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
221         }
222         ierr = PetscFree(domain_names);CHKERRQ(ierr);
223         ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
224         ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
225       }
226       if (osm->n_local_true == PETSC_DECIDE) {
227         /* still no subdomains; use one subdomain per processor */
228         osm->n_local_true = 1;
229       }
230     }
231     { /* determine the global and max number of subdomains */
232       struct {PetscInt max,sum;} inwork,outwork;
233       PetscMPIInt size;
234 
235       inwork.max   = osm->n_local_true;
236       inwork.sum   = osm->n_local_true;
237       ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
238       osm->n_local = outwork.max;
239       osm->n       = outwork.sum;
240 
241       ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
242       if (outwork.max == 1 && outwork.sum == size) {
243         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
244         ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
245       }
246     }
247     if (!osm->is) { /* create the index sets */
248       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
249     }
250     if (osm->n_local_true > 1 && !osm->is_local) {
251       ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
252       for (i=0; i<osm->n_local_true; i++) {
253         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
254           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
255           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
256         } else {
257           ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
258           osm->is_local[i] = osm->is[i];
259         }
260       }
261     }
262     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
263     flg  = PETSC_FALSE;
264     ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
265     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
266 
267     if (osm->overlap > 0) {
268       /* Extend the "overlapping" regions by a number of steps */
269       ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
270     }
271     if (osm->sort_indices) {
272       for (i=0; i<osm->n_local_true; i++) {
273         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
274         if (osm->is_local) {
275           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
276         }
277       }
278     }
279 
280     if (!osm->ksp) {
281       /* Create the local solvers */
282       ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
283       if (domain_dm) {
284         ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
285       }
286       for (i=0; i<osm->n_local_true; i++) {
287         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
288         ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
289         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
290         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
291         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
292         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
293         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
294         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
295         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
296         if (domain_dm) {
297           ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
298           ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
299           ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
300         }
301         osm->ksp[i] = ksp;
302       }
303       if (domain_dm) {
304         ierr = PetscFree(domain_dm);CHKERRQ(ierr);
305       }
306     }
307 
308     ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
309     ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
310     ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
311     ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
312     ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
313 
314     scall = MAT_INITIAL_MATRIX;
315   } else {
316     /*
317        Destroy the blocks from the previous iteration
318     */
319     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
320       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
321       scall = MAT_INITIAL_MATRIX;
322     }
323   }
324 
325   /*
326      Extract out the submatrices
327   */
328   ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
329   if (scall == MAT_INITIAL_MATRIX) {
330     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
331     for (i=0; i<osm->n_local_true; i++) {
332       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
333       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
334     }
335   }
336 
337   /* Convert the types of the submatrices (if needbe) */
338   if (osm->sub_mat_type) {
339     for (i=0; i<osm->n_local_true; i++) {
340       ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
341     }
342   }
343 
344   if(!pc->setupcalled){
345     /* Create the local work vectors (from the local matrices) and scatter contexts */
346     ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
347 
348     if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
349     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
350       ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
351     }
352     ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
353     ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
354     ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);
355 
356     ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
357     ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
358     ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
359     ierr = ISDestroy(&isl);CHKERRQ(ierr);
360 
361 
362     for (i=0; i<osm->n_local_true; ++i) {
363       ISLocalToGlobalMapping ltog;
364       IS                     isll;
365       const PetscInt         *idx_is;
366       PetscInt               *idx_lis,nout;
367 
368       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
369       ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
370       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
371 
372       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
373       ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
374       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
375       ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
376       ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
377       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
378       if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
379       ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
380       ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
381       ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
382       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
383       ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
384       ierr = ISDestroy(&isll);CHKERRQ(ierr);
385       ierr = ISDestroy(&isl);CHKERRQ(ierr);
386       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overalapping is_local[i] entries */
387         ISLocalToGlobalMapping ltog;
388         IS                     isll,isll_local;
389         const PetscInt         *idx_local;
390         PetscInt               *idx1, *idx2, nout;
391 
392         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
393         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
394 
395         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
396         ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
397         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
398         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
399         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
400         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
401 
402         ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
403         ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
404         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
405         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
406         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
407         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);
408 
409         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
410         ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);
411 
412         ierr = ISDestroy(&isll);CHKERRQ(ierr);
413         ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
414       }
415     }
416     ierr = VecDestroy(&vec);CHKERRQ(ierr);
417   }
418 
419   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
420     IS      *cis;
421     PetscInt c;
422 
423     ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
424     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
425     ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
426     ierr = PetscFree(cis);CHKERRQ(ierr);
427   }
428 
429   /* Return control to the user so that the submatrices can be modified (e.g., to apply
430      different boundary conditions for the submatrices than for the global problem) */
431   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
432 
433   /*
434      Loop over subdomains putting them into local ksp
435   */
436   for (i=0; i<osm->n_local_true; i++) {
437     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
438     if (!pc->setupcalled) {
439       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
440     }
441   }
442   PetscFunctionReturn(0);
443 }
444 
445 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
446 {
447   PC_ASM             *osm = (PC_ASM*)pc->data;
448   PetscErrorCode     ierr;
449   PetscInt           i;
450   KSPConvergedReason reason;
451 
452   PetscFunctionBegin;
453   for (i=0; i<osm->n_local_true; i++) {
454     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
455     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
456     if (reason == KSP_DIVERGED_PC_FAILED) {
457       pc->failedreason = PC_SUBPC_ERROR;
458     }
459   }
460   PetscFunctionReturn(0);
461 }
462 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner y = M^{-1} x:
  restrict x to the local merged vector lx, solve each overlapping block,
  accumulate the block solutions into ly, and scatter-add ly back into y.

  For PC_COMPOSITE_MULTIPLICATIVE the RHS of each subsequent block is
  updated with the solution accumulated so far (Gauss-Seidel-like sweep
  over the local blocks).
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){
    /* zero the global and the local solutions */
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else{ /* interpolate the overlapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  }else{
    SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
535 
/*
  PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner,
  y = M^{-T} x, using KSPSolveTranspose() on each block and with the
  restrict/interpolate scatter roles swapped relative to PCApply_ASM().

  NOTE(review): unlike PCApply_ASM(), this routine neither validates
  osm->loctype nor performs the PC_COMPOSITE_MULTIPLICATIVE RHS update for
  subsequent blocks -- confirm the intended transpose semantics for the
  multiplicative variant.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block */
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
     ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
     ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    }
    else{ /* interpolate the overlapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
602 
/* Frees everything created by PCSetUp_ASM() (scatters, work vectors, submatrices,
   subdomain index sets) but keeps the subdomain KSP contexts themselves; those are
   only reset here and are destroyed later in PCDestroy_ASM(). */
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* Reset (but do not destroy) the per-block linear solvers */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* Destroy the extracted subdomain (preconditioning) matrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* osm->lrestriction doubles as the "scatters and per-block work vectors exist" flag;
     the global restriction, lprolongation and x/y arrays are torn down alongside it */
  if (osm->lrestriction) {
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  /* Release the subdomain index sets and the process-local merged objects */
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  /* lmats only exist for the local multiplicative variant */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
648 
649 static PetscErrorCode PCDestroy_ASM(PC pc)
650 {
651   PC_ASM         *osm = (PC_ASM*)pc->data;
652   PetscErrorCode ierr;
653   PetscInt       i;
654 
655   PetscFunctionBegin;
656   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
657   if (osm->ksp) {
658     for (i=0; i<osm->n_local_true; i++) {
659       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
660     }
661     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
662   }
663   ierr = PetscFree(pc->data);CHKERRQ(ierr);
664   PetscFunctionReturn(0);
665 }
666 
667 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
668 {
669   PC_ASM         *osm = (PC_ASM*)pc->data;
670   PetscErrorCode ierr;
671   PetscInt       blocks,ovl;
672   PetscBool      symset,flg;
673   PCASMType      asmtype;
674   PCCompositeType loctype;
675   char           sub_mat_type[256];
676 
677   PetscFunctionBegin;
678   /* set the type to symmetric if matrix is symmetric */
679   if (!osm->type_set && pc->pmat) {
680     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
681     if (symset && flg) osm->type = PC_ASM_BASIC;
682   }
683   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
684   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
685   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
686   if (flg) {
687     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
688     osm->dm_subdomains = PETSC_FALSE;
689   }
690   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
691   if (flg) {
692     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
693     osm->dm_subdomains = PETSC_FALSE;
694   }
695   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
696   if (flg) {
697     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
698     osm->dm_subdomains = PETSC_FALSE;
699   }
700   flg  = PETSC_FALSE;
701   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
702   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
703   flg  = PETSC_FALSE;
704   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
705   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
706   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
707   if(flg){
708     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
709   }
710   ierr = PetscOptionsTail();CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 /*------------------------------------------------------------------------------------*/
715 
/* Implementation of PCASMSetLocalSubdomains() for PCASM: records the caller's
   (overlapping) subdomain index sets is[] and optional non-overlapping is_local[].
   Must be called before PCSetUp(). */
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the currently stored ones,
       so the call is safe even if the caller passes the sets already held by osm */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local was given: derive the overlapping sets is[] from it */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* No overlap will be added, so is[i] can just share is_local[i] */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
763 
/* Implementation of PCASMSetTotalSubdomains() for PCASM: distributes N subdomains
   evenly over the processes of the PC's communicator.  Explicit index sets are not
   supported here; use PCASMSetLocalSubdomains() for that. */
static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  /* N/size blocks each; the first N%size ranks take one extra block */
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    /* Drop any previously stored subdomains; PCSetUp_ASM() will create new ones */
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}
792 
793 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
794 {
795   PC_ASM *osm = (PC_ASM*)pc->data;
796 
797   PetscFunctionBegin;
798   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
799   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
800   if (!pc->setupcalled) osm->overlap = ovl;
801   PetscFunctionReturn(0);
802 }
803 
804 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
805 {
806   PC_ASM *osm = (PC_ASM*)pc->data;
807 
808   PetscFunctionBegin;
809   osm->type     = type;
810   osm->type_set = PETSC_TRUE;
811   PetscFunctionReturn(0);
812 }
813 
814 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
815 {
816   PC_ASM *osm = (PC_ASM*)pc->data;
817 
818   PetscFunctionBegin;
819   *type = osm->type;
820   PetscFunctionReturn(0);
821 }
822 
823 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
824 {
825   PC_ASM *osm = (PC_ASM *) pc->data;
826 
827   PetscFunctionBegin;
828   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
829   osm->loctype = type;
830   PetscFunctionReturn(0);
831 }
832 
833 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
834 {
835   PC_ASM *osm = (PC_ASM *) pc->data;
836 
837   PetscFunctionBegin;
838   *type = osm->loctype;
839   PetscFunctionReturn(0);
840 }
841 
842 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
843 {
844   PC_ASM *osm = (PC_ASM*)pc->data;
845 
846   PetscFunctionBegin;
847   osm->sort_indices = doSort;
848   PetscFunctionReturn(0);
849 }
850 
/* Implementation of PCASMGetSubKSP() for PCASM: hands out the per-block KSP array.
   When first_local is requested this is collective (MPI_Scan), so either every rank
   passes a non-NULL first_local or every rank passes NULL. */
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* Inclusive prefix sum of the block counts minus our own count gives the
       global index of this process's first block */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
872 
873 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
874 {
875   PC_ASM         *osm = (PC_ASM*)pc->data;
876 
877   PetscFunctionBegin;
878   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
879   PetscValidPointer(sub_mat_type,2);
880   *sub_mat_type = osm->sub_mat_type;
881   PetscFunctionReturn(0);
882 }
883 
884 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
885 {
886   PetscErrorCode    ierr;
887   PC_ASM            *osm = (PC_ASM*)pc->data;
888 
889   PetscFunctionBegin;
890   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
891   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
892   ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
893   PetscFunctionReturn(0);
894 }
895 
896 /*@C
897     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
898 
899     Collective on pc
900 
901     Input Parameters:
902 +   pc - the preconditioner context
903 .   n - the number of subdomains for this processor (default value = 1)
904 .   is - the index set that defines the subdomains for this processor
905          (or NULL for PETSc to determine subdomains)
906 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
907          (or NULL to not provide these)
908 
909     Options Database Key:
910     To set the total number of subdomain blocks rather than specify the
911     index sets, use the option
912 .    -pc_asm_local_blocks <blks> - Sets local blocks
913 
914     Notes:
915     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
916 
917     By default the ASM preconditioner uses 1 block per processor.
918 
919     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
920 
921     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
922     back to form the global solution (this is the standard restricted additive Schwarz method)
923 
924     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
925     no code to handle that case.
926 
927     Level: advanced
928 
929 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
930           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
931 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the implementation composed as "PCASMSetLocalSubdomains_C";
     by convention PetscTryMethod() does nothing when the PC type lacks it */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
941 
942 /*@C
943     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
944     additive Schwarz preconditioner.  Either all or no processors in the
945     PC communicator must call this routine, with the same index sets.
946 
947     Collective on pc
948 
949     Input Parameters:
950 +   pc - the preconditioner context
951 .   N  - the number of subdomains for all processors
952 .   is - the index sets that define the subdomains for all processors
953          (or NULL to ask PETSc to determine the subdomains)
954 -   is_local - the index sets that define the local part of the subdomains for this processor
955          (or NULL to not provide this information)
956 
957     Options Database Key:
958     To set the total number of subdomain blocks rather than specify the
959     index sets, use the option
960 .    -pc_asm_blocks <blks> - Sets total blocks
961 
962     Notes:
963     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
964 
965     By default the ASM preconditioner uses 1 block per processor.
966 
967     These index sets cannot be destroyed until after completion of the
968     linear solves for which the ASM preconditioner is being used.
969 
970     Use PCASMSetLocalSubdomains() to set local subdomains.
971 
972     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
973 
974     Level: advanced
975 
976 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
977           PCASMCreateSubdomains2D()
978 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the implementation composed as "PCASMSetTotalSubdomains_C";
     by convention PetscTryMethod() does nothing when the PC type lacks it */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
988 
989 /*@
990     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
991     additive Schwarz preconditioner.  Either all or no processors in the
992     PC communicator must call this routine.
993 
994     Logically Collective on pc
995 
996     Input Parameters:
997 +   pc  - the preconditioner context
998 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
999 
1000     Options Database Key:
1001 .   -pc_asm_overlap <ovl> - Sets overlap
1002 
    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.
1015 
1016     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1017     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1018 
1019     Note that one can define initial index sets with any overlap via
1020     PCASMSetLocalSubdomains(); the routine
1021     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1022     if desired.
1023 
1024     Level: intermediate
1025 
1026 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1027           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1028 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatch to the implementation composed as "PCASMSetOverlap_C";
     by convention PetscTryMethod() does nothing when the PC type lacks it */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1039 
1040 /*@
1041     PCASMSetType - Sets the type of restriction and interpolation used
1042     for local problems in the additive Schwarz method.
1043 
1044     Logically Collective on pc
1045 
1046     Input Parameters:
1047 +   pc  - the preconditioner context
1048 -   type - variant of ASM, one of
1049 .vb
1050       PC_ASM_BASIC       - full interpolation and restriction
1051       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1052       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1053       PC_ASM_NONE        - local processor restriction and interpolation
1054 .ve
1055 
1056     Options Database Key:
1057 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1058 
1059     Notes:
1060     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1061     to limit the local processor interpolation
1062 
1063     Level: intermediate
1064 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1067 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatch to the implementation composed as "PCASMSetType_C";
     by convention PetscTryMethod() does nothing when the PC type lacks it */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1078 
1079 /*@
1080     PCASMGetType - Gets the type of restriction and interpolation used
1081     for local problems in the additive Schwarz method.
1082 
1083     Logically Collective on pc
1084 
1085     Input Parameter:
1086 .   pc  - the preconditioner context
1087 
1088     Output Parameter:
1089 .   type - variant of ASM, one of
1090 
1091 .vb
1092       PC_ASM_BASIC       - full interpolation and restriction
1093       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1094       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1095       PC_ASM_NONE        - local processor restriction and interpolation
1096 .ve
1097 
1098     Options Database Key:
1099 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1100 
1101     Level: intermediate
1102 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1105 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the implementation composed as "PCASMGetType_C";
     by convention PetscUseMethod() errors if the PC type does not provide it */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1115 
1116 /*@
1117   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1118 
1119   Logically Collective on pc
1120 
1121   Input Parameters:
1122 + pc  - the preconditioner context
1123 - type - type of composition, one of
1124 .vb
1125   PC_COMPOSITE_ADDITIVE       - local additive combination
1126   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1127 .ve
1128 
1129   Options Database Key:
1130 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1131 
1132   Level: intermediate
1133 
1134 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1135 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Dispatch to the implementation composed as "PCASMSetLocalType_C";
     by convention PetscTryMethod() does nothing when the PC type lacks it */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1146 
1147 /*@
1148   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1149 
1150   Logically Collective on pc
1151 
1152   Input Parameter:
1153 . pc  - the preconditioner context
1154 
1155   Output Parameter:
1156 . type - type of composition, one of
1157 .vb
1158   PC_COMPOSITE_ADDITIVE       - local additive combination
1159   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1160 .ve
1161 
1162   Options Database Key:
1163 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1164 
1165   Level: intermediate
1166 
1167 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1168 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* Dispatch to the implementation composed as "PCASMGetLocalType_C";
     by convention PetscUseMethod() errors if the PC type does not provide it */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1179 
1180 /*@
1181     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1182 
1183     Logically Collective on pc
1184 
1185     Input Parameters:
1186 +   pc  - the preconditioner context
1187 -   doSort - sort the subdomain indices
1188 
1189     Level: intermediate
1190 
1191 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1192           PCASMCreateSubdomains2D()
1193 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatch to the implementation composed as "PCASMSetSortIndices_C";
     by convention PetscTryMethod() does nothing when the PC type lacks it */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1204 
1205 /*@C
1206    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1207    this processor.
1208 
1209    Collective on pc iff first_local is requested
1210 
1211    Input Parameter:
1212 .  pc - the preconditioner context
1213 
1214    Output Parameters:
1215 +  n_local - the number of blocks on this processor or NULL
1216 .  first_local - the global number of the first block on this processor or NULL,
1217                  all processors must request or all must pass NULL
1218 -  ksp - the array of KSP contexts
1219 
   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.
1222 
1223    You must call KSPSetUp() before calling PCASMGetSubKSP().
1224 
1225    Fortran note:
1226    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1227 
1228    Level: advanced
1229 
1230 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1231           PCASMCreateSubdomains2D(),
1232 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the implementation composed as "PCASMGetSubKSP_C";
     by convention PetscUseMethod() errors if the PC type does not provide it */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1242 
1243 /* -------------------------------------------------------------------------------------*/
1244 /*MC
1245    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1246            its own KSP object.
1247 
1248    Options Database Keys:
1249 +  -pc_asm_blocks <blks> - Sets total blocks
1250 .  -pc_asm_overlap <ovl> - Sets overlap
1251 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1252 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1253 
1254      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1255       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1256       -pc_asm_type basic to use the standard ASM.
1257 
1258    Notes:
1259     Each processor can have one or more blocks, but a block cannot be shared by more
1260      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1261 
1262      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1263         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1264 
1265      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1266          and set the options directly on the resulting KSP object (you can access its PC
1267          with KSPGetPC())
1268 
1269    Level: beginner
1270 
    References:
+   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
     Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Cambridge University Press.
1276 
1277 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1278            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1279            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1280 
1281 M*/
1282 
1283 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1284 {
1285   PetscErrorCode ierr;
1286   PC_ASM         *osm;
1287 
1288   PetscFunctionBegin;
1289   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1290 
1291   osm->n                 = PETSC_DECIDE;
1292   osm->n_local           = 0;
1293   osm->n_local_true      = PETSC_DECIDE;
1294   osm->overlap           = 1;
1295   osm->ksp               = 0;
1296   osm->restriction       = 0;
1297   osm->lprolongation     = 0;
1298   osm->lrestriction      = 0;
1299   osm->x                 = 0;
1300   osm->y                 = 0;
1301   osm->is                = 0;
1302   osm->is_local          = 0;
1303   osm->mat               = 0;
1304   osm->pmat              = 0;
1305   osm->type              = PC_ASM_RESTRICT;
1306   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1307   osm->same_local_solves = PETSC_TRUE;
1308   osm->sort_indices      = PETSC_TRUE;
1309   osm->dm_subdomains     = PETSC_FALSE;
1310   osm->sub_mat_type      = NULL;
1311 
1312   pc->data                 = (void*)osm;
1313   pc->ops->apply           = PCApply_ASM;
1314   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1315   pc->ops->setup           = PCSetUp_ASM;
1316   pc->ops->reset           = PCReset_ASM;
1317   pc->ops->destroy         = PCDestroy_ASM;
1318   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1319   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1320   pc->ops->view            = PCView_ASM;
1321   pc->ops->applyrichardson = 0;
1322 
1323   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1324   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1325   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1326   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1327   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1328   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1329   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1330   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1331   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1332   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1333   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1334   PetscFunctionReturn(0);
1335 }
1336 
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameter:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* the local row range must be block-aligned or block-wise partitioning below is meaningless */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* for (S)BAIJ matrices partition at the block level so blocks are never split */
    ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* CURRENT and SQUARE are trivial partitioners; anything else is assumed worth using */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; adj takes ownership of iia/jja
           (freed by MatDestroy), so they are not freed here */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* distribute the mbs blocks as evenly as possible; first (mbs % n) chunks get one extra block */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering: sort local indices by partition number */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case: expand each block index to bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1512 
1513 /*@C
1514    PCASMDestroySubdomains - Destroys the index sets created with
1515    PCASMCreateSubdomains(). Should be called after setting subdomains
1516    with PCASMSetLocalSubdomains().
1517 
1518    Collective
1519 
1520    Input Parameters:
1521 +  n - the number of index sets
1522 .  is - the array of index sets
1523 -  is_local - the array of local index sets, can be NULL
1524 
1525    Level: advanced
1526 
1527 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1528 @*/
1529 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1530 {
1531   PetscInt       i;
1532   PetscErrorCode ierr;
1533 
1534   PetscFunctionBegin;
1535   if (n <= 0) PetscFunctionReturn(0);
1536   if (is) {
1537     PetscValidPointer(is,2);
1538     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1539     ierr = PetscFree(is);CHKERRQ(ierr);
1540   }
1541   if (is_local) {
1542     PetscValidPointer(is_local,3);
1543     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1544     ierr = PetscFree(is_local);CHKERRQ(ierr);
1545   }
1546   PetscFunctionReturn(0);
1547 }
1548 
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* only dof == 1 is implemented */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* clamp the overlapped strip to the mesh boundaries */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* enumerate the overlapped rectangle in row-major (natural) grid ordering */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* no overlap: local subdomain IS is the same object, just reference it */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* refill idx (large enough, since the non-overlapping box is smaller) with the interior box */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1630 
1631 /*@C
1632     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1633     only) for the additive Schwarz preconditioner.
1634 
1635     Not Collective
1636 
1637     Input Parameter:
1638 .   pc - the preconditioner context
1639 
1640     Output Parameters:
1641 +   n - the number of subdomains for this processor (default value = 1)
1642 .   is - the index sets that define the subdomains for this processor
1643 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1644 
1645 
1646     Notes:
1647     The IS numbering is in the parallel, global numbering of the vector.
1648 
1649     Level: advanced
1650 
1651 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1652           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1653 @*/
1654 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1655 {
1656   PC_ASM         *osm = (PC_ASM*)pc->data;
1657   PetscErrorCode ierr;
1658   PetscBool      match;
1659 
1660   PetscFunctionBegin;
1661   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1662   PetscValidIntPointer(n,2);
1663   if (is) PetscValidPointer(is,3);
1664   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1665   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1666   if (n) *n = osm->n_local_true;
1667   if (is) *is = osm->is;
1668   if (is_local) *is_local = osm->is_local;
1669   PetscFunctionReturn(0);
1670 }
1671 
1672 /*@C
1673     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1674     only) for the additive Schwarz preconditioner.
1675 
1676     Not Collective
1677 
1678     Input Parameter:
1679 .   pc - the preconditioner context
1680 
1681     Output Parameters:
1682 +   n - the number of matrices for this processor (default value = 1)
1683 -   mat - the matrices
1684 
1685     Level: advanced
1686 
1687     Notes:
1688     Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1689 
1690            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1691 
1692 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1693           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1694 @*/
1695 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1696 {
1697   PC_ASM         *osm;
1698   PetscErrorCode ierr;
1699   PetscBool      match;
1700 
1701   PetscFunctionBegin;
1702   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1703   PetscValidIntPointer(n,2);
1704   if (mat) PetscValidPointer(mat,3);
1705   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1706   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1707   if (!match) {
1708     if (n) *n = 0;
1709     if (mat) *mat = NULL;
1710   } else {
1711     osm = (PC_ASM*)pc->data;
1712     if (n) *n = osm->n_local_true;
1713     if (mat) *mat = osm->pmat;
1714   }
1715   PetscFunctionReturn(0);
1716 }
1717 
1718 /*@
1719     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1720 
1721     Logically Collective
1722 
1723     Input Parameter:
1724 +   pc  - the preconditioner
1725 -   flg - boolean indicating whether to use subdomains defined by the DM
1726 
1727     Options Database Key:
1728 .   -pc_asm_dm_subdomains
1729 
1730     Level: intermediate
1731 
1732     Notes:
1733     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1734     so setting either of the first two effectively turns the latter off.
1735 
1736 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1737           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1738 @*/
1739 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1740 {
1741   PC_ASM         *osm = (PC_ASM*)pc->data;
1742   PetscErrorCode ierr;
1743   PetscBool      match;
1744 
1745   PetscFunctionBegin;
1746   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1747   PetscValidLogicalCollectiveBool(pc,flg,2);
1748   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1749   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1750   if (match) {
1751     osm->dm_subdomains = flg;
1752   }
1753   PetscFunctionReturn(0);
1754 }
1755 
1756 /*@
1757     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1758     Not Collective
1759 
1760     Input Parameter:
1761 .   pc  - the preconditioner
1762 
1763     Output Parameter:
1764 .   flg - boolean indicating whether to use subdomains defined by the DM
1765 
1766     Level: intermediate
1767 
1768 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1769           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1770 @*/
1771 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1772 {
1773   PC_ASM         *osm = (PC_ASM*)pc->data;
1774   PetscErrorCode ierr;
1775   PetscBool      match;
1776 
1777   PetscFunctionBegin;
1778   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1779   PetscValidPointer(flg,2);
1780   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1781   if (match) {
1782     if (flg) *flg = osm->dm_subdomains;
1783   }
1784   PetscFunctionReturn(0);
1785 }
1786 
1787 /*@
1788      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1789 
1790    Not Collective
1791 
1792    Input Parameter:
1793 .  pc - the PC
1794 
1795    Output Parameter:
1796 .  -pc_asm_sub_mat_type - name of matrix type
1797 
1798    Level: advanced
1799 
1800 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1801 @*/
1802 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1803   PetscErrorCode ierr;
1804 
1805   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1806   PetscFunctionReturn(0);
1807 }
1808 
1809 /*@
1810      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1811 
1812    Collective on Mat
1813 
1814    Input Parameters:
1815 +  pc             - the PC object
1816 -  sub_mat_type   - matrix type
1817 
1818    Options Database Key:
1819 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1820 
1821    Notes:
1822    See "${PETSC_DIR}/include/petscmat.h" for available types
1823 
1824   Level: advanced
1825 
1826 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1827 @*/
1828 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1829 {
1830   PetscErrorCode ierr;
1831 
1832   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1833   PetscFunctionReturn(0);
1834 }
1835