xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision fc8a9adeb7fcdc98711d755fa2dc544ddccf0f3e)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
13 #include <petscdm.h>
14 
/* Private context for the additive Schwarz (ASM) preconditioner. */
typedef struct {
  PetscInt   n, n_local, n_local_true; /* see the counting convention in the file header comment above */
  PetscInt   overlap;             /* overlap requested by user; negative means "do not increase overlap of user/DM-provided index sets" */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter restriction;         /* mapping from global to overlapping (process) subdomain*/
  VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain */
  VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */
  Vec        lx, ly;              /* work vectors (sized by the union of all local overlapping subdomains, osm->lis) */
  Vec        *x,*y;               /* work vectors, one pair per local block */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
/*
  PCView_ASM - Views the ASM preconditioner state on an ASCII or string viewer.

  For ASCII viewers this is collective: every rank must make the same sequence
  of viewer calls (GetSubViewer/RestoreSubViewer, synchronized prints, flushes),
  so the call order below must not be changed per-rank.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* defaults are used when overlap/blocks have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* all blocks use the same solver: view only the first KSP, on rank 0 */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* heterogeneous blocks: every rank prints its own block information in order */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* compact one-line summary plus (possibly) the first block's KSP */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
98 
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer, sviewer;
105   char           *s;
106   PetscInt       i,j,nidx;
107   const PetscInt *idx;
108   PetscMPIInt    rank, size;
109   PetscErrorCode ierr;
110 
111   PetscFunctionBegin;
112   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
113   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
114   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
115   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
116   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
117   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
118   for (i=0; i<osm->n_local; i++) {
119     if (i < osm->n_local_true) {
120       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
121       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
122       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
123 #define len  16*(nidx+1)+512
124       ierr = PetscMalloc1(len,&s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
126 #undef len
127       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
128       for (j=0; j<nidx; j++) {
129         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
130       }
131       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
132       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
133       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
136       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
137       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
138       ierr = PetscFree(s);CHKERRQ(ierr);
139       if (osm->is_local) {
140         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
141 #define len  16*(nidx+1)+512
142         ierr = PetscMalloc1(len, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
144 #undef len
145         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
146         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
147         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
148         for (j=0; j<nidx; j++) {
149           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
150         }
151         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
152         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
153         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
156         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
157         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
158         ierr = PetscFree(s);CHKERRQ(ierr);
159       }
160     } else {
161       /* Participate in collective viewer calls. */
162       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
164       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
165       /* Assume either all ranks have is_local or none do. */
166       if (osm->is_local) {
167         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
169         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
170       }
171     }
172   }
173   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
174   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
175   PetscFunctionReturn(0);
176 }
177 
/*
  PCSetUp_ASM - Builds the ASM preconditioner data: subdomain index sets,
  overlap extension, submatrices, local KSP solvers, work vectors and the
  scatters between the global vector, the process-local union of subdomains,
  and the per-block vectors.

  On the first call (!pc->setupcalled) everything is created; on subsequent
  calls only the submatrices are re-extracted (and recreated from scratch when
  the nonzero pattern changed).
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m;   /* NOTE(review): shadows the outer m; used for the size of osm->lis below */

    /* for symmetric problems default to the symmetric (basic) variant, unless the user chose a type */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* record the pre-overlap index sets as the non-overlapping subdomains */
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        /* subsolvers are controlled via the outer prefix plus "sub_" */
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }

    /* lis = sorted, duplicate-free union of all local overlapping subdomains;
       lx/ly are work vectors of that size */
    ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
    ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
    ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);

    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      /* lprolongation is only needed for the restrict-additive variant */
      ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);

    /* global vector -> lx/ly: scatter the union of local subdomains */
    ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
    ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
    ierr = ISDestroy(&isl);CHKERRQ(ierr);


    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
      /* translate the global indices of is[i] into positions within lis */
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
      if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isll);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overalapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);

        /* idx1: positions of is_local[i] within is[i] (i.e. within y[i]) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);

        /* idx2: positions of is_local[i] within lis (i.e. within ly) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);

        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);

        ierr = ISDestroy(&isll);CHKERRQ(ierr);
        ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
      }
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* extract rectangular blocks (rows from is[i], columns from lis) used to
       update the remaining RHS blocks with the current local solution */
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
444 
445 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
446 {
447   PC_ASM             *osm = (PC_ASM*)pc->data;
448   PetscErrorCode     ierr;
449   PetscInt           i;
450   KSPConvergedReason reason;
451 
452   PetscFunctionBegin;
453   for (i=0; i<osm->n_local_true; i++) {
454     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
455     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
456     if (reason == KSP_DIVERGED_PC_FAILED) {
457       pc->failedreason = PC_SUBPC_ERROR;
458     }
459   }
460   PetscFunctionReturn(0);
461 }
462 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

  The global RHS is scattered to the process-local work vector lx, each block
  is solved with its own KSP, the block solutions are accumulated into ly
  (additively, or with a multiplicative RHS update between blocks), and ly is
  scattered back (added) into the global solution y.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){
    /* zero the global and the local solutions */
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else{ /* interpolate the overalapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* udpdate the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1] * ly */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  }else{
    SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
535 
/*
  PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner:
  y = M^{-T} x. Mirrors PCApply_ASM() but uses KSPSolveTranspose() for the
  block solves and swaps the restrict/interpolate roles of the scatters
  (only the additive composition is handled here).
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block (transposed solve) */
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution */
     ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
     ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    }
    else{ /* interpolate the overalapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
602 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* Reset (do not destroy) each per-block solver so it can be reused by a later PCSetUp() */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* Free the overlapping submatrices extracted from the preconditioning matrix */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* NOTE(review): osm->restriction is only destroyed when osm->lrestriction is non-NULL;
     presumably the scatters and work vectors are always created together — confirm in PCSetUp_ASM */
  if (osm->lrestriction) {
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      /* lprolongation exists only for the restricted variants (see struct comment above) */
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  /* Release the subdomain index sets and the process-local work objects */
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
648 
649 static PetscErrorCode PCDestroy_ASM(PC pc)
650 {
651   PC_ASM         *osm = (PC_ASM*)pc->data;
652   PetscErrorCode ierr;
653   PetscInt       i;
654 
655   PetscFunctionBegin;
656   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
657   if (osm->ksp) {
658     for (i=0; i<osm->n_local_true; i++) {
659       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
660     }
661     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
662   }
663   ierr = PetscFree(pc->data);CHKERRQ(ierr);
664   PetscFunctionReturn(0);
665 }
666 
667 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
668 {
669   PC_ASM         *osm = (PC_ASM*)pc->data;
670   PetscErrorCode ierr;
671   PetscInt       blocks,ovl;
672   PetscBool      symset,flg;
673   PCASMType      asmtype;
674   PCCompositeType loctype;
675   char           sub_mat_type[256];
676 
677   PetscFunctionBegin;
678   /* set the type to symmetric if matrix is symmetric */
679   if (!osm->type_set && pc->pmat) {
680     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
681     if (symset && flg) osm->type = PC_ASM_BASIC;
682   }
683   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
684   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
685   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
686   if (flg) {
687     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
688     osm->dm_subdomains = PETSC_FALSE;
689   }
690   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
691   if (flg) {
692     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
693     osm->dm_subdomains = PETSC_FALSE;
694   }
695   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
696   if (flg) {
697     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
698     osm->dm_subdomains = PETSC_FALSE;
699   }
700   flg  = PETSC_FALSE;
701   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
702   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
703   flg  = PETSC_FALSE;
704   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
705   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
706   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
707   if(flg){
708     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
709   }
710   ierr = PetscOptionsTail();CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 /*------------------------------------------------------------------------------------*/
715 
/* Implementation of PCASMSetLocalSubdomains(): records the user-supplied (overlapping
   and/or non-overlapping) subdomain index sets, taking a reference on each. */
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the old ones, so the call
       is safe even if the caller passes the index sets already stored here */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* Only is_local was given: derive the overlapping sets from it */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            /* take an independent copy so extending the overlap leaves the user's IS untouched */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* no overlap expansion: share the same IS, with an extra reference */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
763 
764 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
765 {
766   PC_ASM         *osm = (PC_ASM*)pc->data;
767   PetscErrorCode ierr;
768   PetscMPIInt    rank,size;
769   PetscInt       n;
770 
771   PetscFunctionBegin;
772   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
773   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
774 
775   /*
776      Split the subdomains equally among all processors
777   */
778   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
779   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
780   n    = N/size + ((N % size) > rank);
781   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
782   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
783   if (!pc->setupcalled) {
784     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
785 
786     osm->n_local_true = n;
787     osm->is           = 0;
788     osm->is_local     = 0;
789   }
790   PetscFunctionReturn(0);
791 }
792 
793 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
794 {
795   PC_ASM *osm = (PC_ASM*)pc->data;
796 
797   PetscFunctionBegin;
798   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
799   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
800   if (!pc->setupcalled) osm->overlap = ovl;
801   PetscFunctionReturn(0);
802 }
803 
804 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
805 {
806   PC_ASM *osm = (PC_ASM*)pc->data;
807 
808   PetscFunctionBegin;
809   osm->type     = type;
810   osm->type_set = PETSC_TRUE;
811   PetscFunctionReturn(0);
812 }
813 
814 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
815 {
816   PC_ASM *osm = (PC_ASM*)pc->data;
817 
818   PetscFunctionBegin;
819   *type = osm->type;
820   PetscFunctionReturn(0);
821 }
822 
823 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
824 {
825   PC_ASM *osm = (PC_ASM *) pc->data;
826 
827   PetscFunctionBegin;
828   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
829   osm->loctype = type;
830   PetscFunctionReturn(0);
831 }
832 
833 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
834 {
835   PC_ASM *osm = (PC_ASM *) pc->data;
836 
837   PetscFunctionBegin;
838   *type = osm->loctype;
839   PetscFunctionReturn(0);
840 }
841 
842 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
843 {
844   PC_ASM *osm = (PC_ASM*)pc->data;
845 
846   PetscFunctionBegin;
847   osm->sort_indices = doSort;
848   PetscFunctionReturn(0);
849 }
850 
851 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
852 {
853   PC_ASM         *osm = (PC_ASM*)pc->data;
854   PetscErrorCode ierr;
855 
856   PetscFunctionBegin;
857   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
858 
859   if (n_local) *n_local = osm->n_local_true;
860   if (first_local) {
861     ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
862     *first_local -= osm->n_local_true;
863   }
864   if (ksp) {
865     /* Assume that local solves are now different; not necessarily
866        true though!  This flag is used only for PCView_ASM() */
867     *ksp                   = osm->ksp;
868     osm->same_local_solves = PETSC_FALSE;
869   }
870   PetscFunctionReturn(0);
871 }
872 
873 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
874 {
875   PC_ASM         *osm = (PC_ASM*)pc->data;
876 
877   PetscFunctionBegin;
878   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
879   PetscValidPointer(sub_mat_type,2);
880   *sub_mat_type = osm->sub_mat_type;
881   PetscFunctionReturn(0);
882 }
883 
884 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
885 {
886   PetscErrorCode    ierr;
887   PC_ASM            *osm = (PC_ASM*)pc->data;
888 
889   PetscFunctionBegin;
890   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
891   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
892   ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
893   PetscFunctionReturn(0);
894 }
895 
896 /*@C
897     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
898 
899     Collective on PC
900 
901     Input Parameters:
902 +   pc - the preconditioner context
903 .   n - the number of subdomains for this processor (default value = 1)
904 .   is - the index set that defines the subdomains for this processor
905          (or NULL for PETSc to determine subdomains)
906 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
907          (or NULL to not provide these)
908 
909     Options Database Key:
910     To set the total number of subdomain blocks rather than specify the
911     index sets, use the option
912 .    -pc_asm_local_blocks <blks> - Sets local blocks
913 
914     Notes:
915     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
916 
917     By default the ASM preconditioner uses 1 block per processor.
918 
919     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
920 
921     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
922     back to form the global solution (this is the standard restricted additive Schwarz method)
923 
924     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
925     no code to handle that case.
926 
927     Level: advanced
928 
929 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
930 
931 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
932           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
933 @*/
934 PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
935 {
936   PetscErrorCode ierr;
937 
938   PetscFunctionBegin;
939   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
940   ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
941   PetscFunctionReturn(0);
942 }
943 
944 /*@C
945     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
946     additive Schwarz preconditioner.  Either all or no processors in the
947     PC communicator must call this routine, with the same index sets.
948 
949     Collective on PC
950 
951     Input Parameters:
952 +   pc - the preconditioner context
953 .   N  - the number of subdomains for all processors
954 .   is - the index sets that define the subdomains for all processors
955          (or NULL to ask PETSc to determine the subdomains)
956 -   is_local - the index sets that define the local part of the subdomains for this processor
957          (or NULL to not provide this information)
958 
959     Options Database Key:
960     To set the total number of subdomain blocks rather than specify the
961     index sets, use the option
962 .    -pc_asm_blocks <blks> - Sets total blocks
963 
964     Notes:
965     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
966 
967     By default the ASM preconditioner uses 1 block per processor.
968 
969     These index sets cannot be destroyed until after completion of the
970     linear solves for which the ASM preconditioner is being used.
971 
972     Use PCASMSetLocalSubdomains() to set local subdomains.
973 
974     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
975 
976     Level: advanced
977 
978 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
979 
980 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
981           PCASMCreateSubdomains2D()
982 @*/
983 PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
984 {
985   PetscErrorCode ierr;
986 
987   PetscFunctionBegin;
988   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
989   ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
990   PetscFunctionReturn(0);
991 }
992 
993 /*@
994     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
995     additive Schwarz preconditioner.  Either all or no processors in the
996     PC communicator must call this routine.
997 
998     Logically Collective on PC
999 
1000     Input Parameters:
1001 +   pc  - the preconditioner context
1002 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1003 
1004     Options Database Key:
1005 .   -pc_asm_overlap <ovl> - Sets overlap
1006 
1007     Notes:
1008     By default the ASM preconditioner uses 1 block per processor.  To use
1009     multiple blocks per processor, see PCASMSetTotalSubdomains() and
1010     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1011 
1012     The overlap defaults to 1, so if one desires that no additional
1013     overlap be computed beyond what may have been set with a call to
1014     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1015     must be set to be 0.  In particular, if one does not explicitly set
1016     the subdomains in an application code, then all overlap would be computed
1017     internally by PETSc, and using an overlap of 0 would result in an ASM
1018     variant that is equivalent to the block Jacobi preconditioner.
1019 
1020     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1021     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1022 
1023     Note that one can define initial index sets with any overlap via
1024     PCASMSetLocalSubdomains(); the routine
1025     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1026     if desired.
1027 
1028     Level: intermediate
1029 
1030 .keywords: PC, ASM, set, overlap
1031 
1032 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1033           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1034 @*/
1035 PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
1036 {
1037   PetscErrorCode ierr;
1038 
1039   PetscFunctionBegin;
1040   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1041   PetscValidLogicalCollectiveInt(pc,ovl,2);
1042   ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
1043   PetscFunctionReturn(0);
1044 }
1045 
1046 /*@
1047     PCASMSetType - Sets the type of restriction and interpolation used
1048     for local problems in the additive Schwarz method.
1049 
1050     Logically Collective on PC
1051 
1052     Input Parameters:
1053 +   pc  - the preconditioner context
1054 -   type - variant of ASM, one of
1055 .vb
1056       PC_ASM_BASIC       - full interpolation and restriction
1057       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1058       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1059       PC_ASM_NONE        - local processor restriction and interpolation
1060 .ve
1061 
1062     Options Database Key:
1063 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1064 
1065     Notes:
1066     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1067     to limit the local processor interpolation
1068 
1069     Level: intermediate
1070 
1071 .keywords: PC, ASM, set, type
1072 
1073 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1074           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1075 @*/
1076 PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
1077 {
1078   PetscErrorCode ierr;
1079 
1080   PetscFunctionBegin;
1081   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1082   PetscValidLogicalCollectiveEnum(pc,type,2);
1083   ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
1084   PetscFunctionReturn(0);
1085 }
1086 
1087 /*@
1088     PCASMGetType - Gets the type of restriction and interpolation used
1089     for local problems in the additive Schwarz method.
1090 
1091     Logically Collective on PC
1092 
1093     Input Parameter:
1094 .   pc  - the preconditioner context
1095 
1096     Output Parameter:
1097 .   type - variant of ASM, one of
1098 
1099 .vb
1100       PC_ASM_BASIC       - full interpolation and restriction
1101       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1102       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1103       PC_ASM_NONE        - local processor restriction and interpolation
1104 .ve
1105 
1106     Options Database Key:
1107 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1108 
1109     Level: intermediate
1110 
1111 .keywords: PC, ASM, set, type
1112 
1113 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1114           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1115 @*/
1116 PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
1117 {
1118   PetscErrorCode ierr;
1119 
1120   PetscFunctionBegin;
1121   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1122   ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
1123   PetscFunctionReturn(0);
1124 }
1125 
1126 /*@
1127   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1128 
1129   Logically Collective on PC
1130 
1131   Input Parameters:
1132 + pc  - the preconditioner context
1133 - type - type of composition, one of
1134 .vb
1135   PC_COMPOSITE_ADDITIVE       - local additive combination
1136   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1137 .ve
1138 
1139   Options Database Key:
1140 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1141 
1142   Level: intermediate
1143 
1144 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1145 @*/
1146 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1147 {
1148   PetscErrorCode ierr;
1149 
1150   PetscFunctionBegin;
1151   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1152   PetscValidLogicalCollectiveEnum(pc, type, 2);
1153   ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
1154   PetscFunctionReturn(0);
1155 }
1156 
1157 /*@
1158   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1159 
1160   Logically Collective on PC
1161 
1162   Input Parameter:
1163 . pc  - the preconditioner context
1164 
1165   Output Parameter:
1166 . type - type of composition, one of
1167 .vb
1168   PC_COMPOSITE_ADDITIVE       - local additive combination
1169   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1170 .ve
1171 
1172   Options Database Key:
1173 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1174 
1175   Level: intermediate
1176 
1177 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1178 @*/
1179 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1180 {
1181   PetscErrorCode ierr;
1182 
1183   PetscFunctionBegin;
1184   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1185   PetscValidPointer(type, 2);
1186   ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
1187   PetscFunctionReturn(0);
1188 }
1189 
1190 /*@
1191     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1192 
1193     Logically Collective on PC
1194 
1195     Input Parameters:
1196 +   pc  - the preconditioner context
1197 -   doSort - sort the subdomain indices
1198 
1199     Level: intermediate
1200 
1201 .keywords: PC, ASM, set, type
1202 
1203 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1204           PCASMCreateSubdomains2D()
1205 @*/
1206 PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
1207 {
1208   PetscErrorCode ierr;
1209 
1210   PetscFunctionBegin;
1211   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1212   PetscValidLogicalCollectiveBool(pc,doSort,2);
1213   ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
1214   PetscFunctionReturn(0);
1215 }
1216 
1217 /*@C
1218    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1219    this processor.
1220 
1221    Collective on PC iff first_local is requested
1222 
1223    Input Parameter:
1224 .  pc - the preconditioner context
1225 
1226    Output Parameters:
1227 +  n_local - the number of blocks on this processor or NULL
1228 .  first_local - the global number of the first block on this processor or NULL,
1229                  all processors must request or all must pass NULL
1230 -  ksp - the array of KSP contexts
1231 
1232    Note:
1233    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1234 
1235    You must call KSPSetUp() before calling PCASMGetSubKSP().
1236 
1237    Fortran note:
1238    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1239 
1240    Level: advanced
1241 
1242 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1243 
1244 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1245           PCASMCreateSubdomains2D(),
1246 @*/
1247 PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1248 {
1249   PetscErrorCode ierr;
1250 
1251   PetscFunctionBegin;
1252   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1253   ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 /* -------------------------------------------------------------------------------------*/
1258 /*MC
1259    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1260            its own KSP object.
1261 
1262    Options Database Keys:
1263 +  -pc_asm_blocks <blks> - Sets total blocks
1264 .  -pc_asm_overlap <ovl> - Sets overlap
1265 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1266 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1267 
1268      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1269       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1270       -pc_asm_type basic to use the standard ASM.
1271 
1272    Notes:
1273     Each processor can have one or more blocks, but a block cannot be shared by more
1274      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1275 
1276      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1277         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1278 
1279      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1280          and set the options directly on the resulting KSP object (you can access its PC
1281          with KSPGetPC())
1282 
1283    Level: beginner
1284 
1285    Concepts: additive Schwarz method
1286 
1287     References:
1288 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1289      Courant Institute, New York University Technical report
1290 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1291     Cambridge University Press.
1292 
1293 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1294            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1295            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1296 
1297 M*/
1298 
/* Constructor for PCASM: allocates the PC_ASM context, sets the defaults
   (one block per process, overlap 1, restricted additive Schwarz), installs
   the PC operation table, and composes the PCASMXXX_C dispatch methods. */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* defaults; n/n_local_true = PETSC_DECIDE means "one block per process" until setup */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->lprolongation     = 0;
  osm->lrestriction      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;       /* restricted ASM is the default variant */
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  /* install the virtual-function table for this PC type */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* compose the implementation methods so the PCASMXXX() wrappers can dispatch to them */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1352 
1353 /*@C
1354    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1355    preconditioner for a any problem on a general grid.
1356 
1357    Collective
1358 
1359    Input Parameters:
1360 +  A - The global matrix operator
1361 -  n - the number of local blocks
1362 
1363    Output Parameters:
1364 .  outis - the array of index sets defining the subdomains
1365 
1366    Level: advanced
1367 
1368    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1369     from these if you use PCASMSetLocalSubdomains()
1370 
1371     In the Fortran version you must provide the array outis[] already allocated of length n.
1372 
1373 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1374 
1375 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1376 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* the local row range must be aligned with the matrix block size */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* detect sequential (symmetric) block formats; isbaij controls both the
       compressed (per-block) row structure request below and the later
       bs-expansion of the partition indices */
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0; /* note: i,j here shadow the outer loop variables */
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; per the MatCreateMPIAdj()
           contract the adj matrix takes ownership of iia/jja, so they are
           not freed here (they go away with MatDestroy(&adj)) */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* the caller receives ownership of the IS array (released with PCASMDestroySubdomains()) */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* spread the remainder (mbs % n) one block-row at a time over the first subdomains */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    /* sort the identity permutation by the partition numbering so that
       indices belonging to the same partition become contiguous */
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1530 
1531 /*@C
1532    PCASMDestroySubdomains - Destroys the index sets created with
1533    PCASMCreateSubdomains(). Should be called after setting subdomains
1534    with PCASMSetLocalSubdomains().
1535 
1536    Collective
1537 
1538    Input Parameters:
1539 +  n - the number of index sets
1540 .  is - the array of index sets
1541 -  is_local - the array of local index sets, can be NULL
1542 
1543    Level: advanced
1544 
1545 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1546 
1547 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1548 @*/
1549 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1550 {
1551   PetscInt       i;
1552   PetscErrorCode ierr;
1553 
1554   PetscFunctionBegin;
1555   if (n <= 0) PetscFunctionReturn(0);
1556   if (is) {
1557     PetscValidPointer(is,2);
1558     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1559     ierr = PetscFree(is);CHKERRQ(ierr);
1560   }
1561   if (is_local) {
1562     PetscValidPointer(is_local,3);
1563     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1564     ierr = PetscFree(is_local);CHKERRQ(ierr);
1565   }
1566   PetscFunctionReturn(0);
1567 }
1568 
1569 /*@
1570    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1571    preconditioner for a two-dimensional problem on a regular grid.
1572 
1573    Not Collective
1574 
1575    Input Parameters:
1576 +  m, n - the number of mesh points in the x and y directions
1577 .  M, N - the number of subdomains in the x and y directions
1578 .  dof - degrees of freedom per node
1579 -  overlap - overlap in mesh lines
1580 
1581    Output Parameters:
1582 +  Nsub - the number of subdomains created
1583 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1584 -  is_local - array of index sets defining non-overlapping subdomains
1585 
1586    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1588    preconditioners.  More general related routines are
1589    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1590 
1591    Level: advanced
1592 
1593 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1594 
1595 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1596           PCASMSetOverlap()
1597 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* only one degree of freedom per node is implemented */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend the y-range by the overlap, clipped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* extend the x-range by the overlap, clipped to the mesh boundary */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* gather the row-major mesh indices of the overlapping subdomain */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* no overlap: the local subdomain equals the overlapping one, so
           share the IS (with an extra reference) instead of copying it */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* refill idx (buffer reused) with the non-overlapping interior only */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* sort all overlapping index sets */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1652 
1653 /*@C
1654     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1655     only) for the additive Schwarz preconditioner.
1656 
1657     Not Collective
1658 
1659     Input Parameter:
1660 .   pc - the preconditioner context
1661 
1662     Output Parameters:
1663 +   n - the number of subdomains for this processor (default value = 1)
1664 .   is - the index sets that define the subdomains for this processor
1665 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1666 
1667 
1668     Notes:
1669     The IS numbering is in the parallel, global numbering of the vector.
1670 
1671     Level: advanced
1672 
1673 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1674 
1675 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1676           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1677 @*/
1678 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1679 {
1680   PC_ASM         *osm = (PC_ASM*)pc->data;
1681   PetscErrorCode ierr;
1682   PetscBool      match;
1683 
1684   PetscFunctionBegin;
1685   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1686   PetscValidIntPointer(n,2);
1687   if (is) PetscValidPointer(is,3);
1688   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1689   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1690   if (n) *n = osm->n_local_true;
1691   if (is) *is = osm->is;
1692   if (is_local) *is_local = osm->is_local;
1693   PetscFunctionReturn(0);
1694 }
1695 
1696 /*@C
1697     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1698     only) for the additive Schwarz preconditioner.
1699 
1700     Not Collective
1701 
1702     Input Parameter:
1703 .   pc - the preconditioner context
1704 
1705     Output Parameters:
1706 +   n - the number of matrices for this processor (default value = 1)
1707 -   mat - the matrices
1708 
1709     Level: advanced
1710 
1711     Notes:
1712     Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1713 
1714            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1715 
1716 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1717 
1718 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1719           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1720 @*/
1721 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1722 {
1723   PC_ASM         *osm;
1724   PetscErrorCode ierr;
1725   PetscBool      match;
1726 
1727   PetscFunctionBegin;
1728   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1729   PetscValidIntPointer(n,2);
1730   if (mat) PetscValidPointer(mat,3);
1731   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1732   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1733   if (!match) {
1734     if (n) *n = 0;
1735     if (mat) *mat = NULL;
1736   } else {
1737     osm = (PC_ASM*)pc->data;
1738     if (n) *n = osm->n_local_true;
1739     if (mat) *mat = osm->pmat;
1740   }
1741   PetscFunctionReturn(0);
1742 }
1743 
1744 /*@
1745     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1746 
1747     Logically Collective
1748 
    Input Parameters:
1750 +   pc  - the preconditioner
1751 -   flg - boolean indicating whether to use subdomains defined by the DM
1752 
1753     Options Database Key:
1754 .   -pc_asm_dm_subdomains
1755 
1756     Level: intermediate
1757 
1758     Notes:
1759     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1760     so setting either of the first two effectively turns the latter off.
1761 
1762 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1763 
1764 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1765           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1766 @*/
1767 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1768 {
1769   PC_ASM         *osm = (PC_ASM*)pc->data;
1770   PetscErrorCode ierr;
1771   PetscBool      match;
1772 
1773   PetscFunctionBegin;
1774   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1775   PetscValidLogicalCollectiveBool(pc,flg,2);
1776   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1777   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1778   if (match) {
1779     osm->dm_subdomains = flg;
1780   }
1781   PetscFunctionReturn(0);
1782 }
1783 
1784 /*@
    PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Not Collective
1787 
1788     Input Parameter:
1789 .   pc  - the preconditioner
1790 
1791     Output Parameter:
1792 .   flg - boolean indicating whether to use subdomains defined by the DM
1793 
1794     Level: intermediate
1795 
1796 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1797 
1798 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1799           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1800 @*/
1801 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1802 {
1803   PC_ASM         *osm = (PC_ASM*)pc->data;
1804   PetscErrorCode ierr;
1805   PetscBool      match;
1806 
1807   PetscFunctionBegin;
1808   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1809   PetscValidPointer(flg,2);
1810   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1811   if (match) {
1812     if (flg) *flg = osm->dm_subdomains;
1813   }
1814   PetscFunctionReturn(0);
1815 }
1816 
1817 /*@
1818      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1819 
1820    Not Collective
1821 
1822    Input Parameter:
1823 .  pc - the PC
1824 
1825    Output Parameter:
.  sub_mat_type - name of matrix type
1827 
1828    Level: advanced
1829 
1830 .keywords: PC, PCASM, MatType, set
1831 
1832 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1833 @*/
1834 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1835   PetscErrorCode ierr;
1836 
1837   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1838   PetscFunctionReturn(0);
1839 }
1840 
1841 /*@
1842      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1843 
1844    Collective on Mat
1845 
1846    Input Parameters:
1847 +  pc             - the PC object
1848 -  sub_mat_type   - matrix type
1849 
1850    Options Database Key:
1851 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1852 
1853    Notes:
1854    See "${PETSC_DIR}/include/petscmat.h" for available types
1855 
1856   Level: advanced
1857 
1858 .keywords: PC, PCASM, MatType, set
1859 
1860 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1861 @*/
1862 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1863 {
1864   PetscErrorCode ierr;
1865 
1866   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1867   PetscFunctionReturn(0);
1868 }
1869