xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 9e436492a5f3dfdbfdb4be14586e1a8ebf1e4e34)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
/*
   Private context for the additive Schwarz (ASM) preconditioner.
   Arrays sized osm->n_local (or n_local_true where noted) hold per-subdomain objects.
*/
typedef struct {
  PetscInt   n, n_local, n_local_true;  /* n: total true subdomains over all ranks; n_local: max over ranks of n_local_true; n_local_true: subdomains on this rank */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block (length n_local_true) */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; used for restrict version of algorithms */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
39 
/*
   PCView_ASM - Prints the state of the ASM preconditioner: block counts, overlap,
   restriction/interpolation type, and the sub-KSP solver(s).

   Supports ASCII viewers (full report) and string viewers (one-line summary).
   NOTE(review): the sub-viewer Get/Restore and synchronized-print calls below are
   collective on the viewer's communicator; all ranks must execute them in the
   same order even when only rank 0 prints.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Defaults cover the not-yet-configured cases: overlap < 0 means user-defined
       subdomains (overlap untouched); n <= 0 means setup has not run yet. */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share the same solver configuration: view only block 0, from rank 0. */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        /* Collective: every rank must call Restore even though only rank 0 viewed. */
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers differ: every rank reports each of its local blocks in turn. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line summary plus the (shared) block-0 solver. */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 static PetscErrorCode PCASMPrintSubdomains(PC pc)
101 {
102   PC_ASM         *osm = (PC_ASM*)pc->data;
103   const char     *prefix;
104   char           fname[PETSC_MAX_PATH_LEN+1];
105   PetscViewer    viewer, sviewer;
106   char           *s;
107   PetscInt       i,j,nidx;
108   const PetscInt *idx;
109   PetscMPIInt    rank, size;
110   PetscErrorCode ierr;
111 
112   PetscFunctionBegin;
113   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
114   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
115   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
116   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
117   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
118   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
119   for (i=0; i<osm->n_local; i++) {
120     if (i < osm->n_local_true) {
121       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
123       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
124       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
126       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
127       for (j=0; j<nidx; j++) {
128         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
129       }
130       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
131       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
132       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
133       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
135       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
137       ierr = PetscFree(s);CHKERRQ(ierr);
138       if (osm->is_local) {
139         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
140         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
141         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
142         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
143         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
144         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
145         for (j=0; j<nidx; j++) {
146           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
147         }
148         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
149         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
150         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
151         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
152         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
153         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscFree(s);CHKERRQ(ierr);
156       }
157     } else {
158       /* Participate in collective viewer calls. */
159       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
160       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
161       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
162       /* Assume either all ranks have is_local or none do. */
163       if (osm->is_local) {
164         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
165         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
166         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
167       }
168     }
169   }
170   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
171   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
172   PetscFunctionReturn(0);
173 }
174 
/*
   PCSetUp_ASM - Builds the ASM preconditioner data: subdomain index sets,
   overlap extension, subdomain submatrices, per-block KSP solvers, work
   vectors and the restriction/localization/prolongation scatters.

   On first setup (pc->setupcalled false) everything is created; on later
   setups only the submatrices are re-extracted (re-created from scratch when
   the nonzero pattern changed). The body contains collective operations
   (MPIU_Allreduce, MatCreateSubMatrices, scatter creation) whose order must be
   identical on all ranks.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* If the user did not choose a type and the matrix is known symmetric,
       use the symmetric (basic) variant. */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains took references; drop ours and free the arrays. */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* Record the pre-overlap index sets as the "local" (non-overlapping) ones. */
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* No overlap: is_local can share the same IS (extra reference, no copy). */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        /* Each block solver is sequential (PETSC_COMM_SELF) and defaults to
           preonly with the outer prefix plus "sub_". */
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      /* Union of all local subdomains, used to carry corrections between blocks. */
      PetscInt m; /* NOTE(review): shadows the outer 'm'; harmless but worth renaming */

      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* Build the overlapping->non-overlapping map by translating is_local
           into positions within is (its superset). */
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* No localization: prolongation and y_local alias restriction and y
           (extra references, not copies). */
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* Ranks with fewer than n_local true subdomains get empty vectors/scatters
       so that the apply loops stay collective. */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* Extract the rows of each subdomain against the union of all subdomains;
       these rectangular blocks propagate corrections between subdomain solves. */
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
431 
432 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
433 {
434   PC_ASM             *osm = (PC_ASM*)pc->data;
435   PetscErrorCode     ierr;
436   PetscInt           i;
437   KSPConvergedReason reason;
438 
439   PetscFunctionBegin;
440   for (i=0; i<osm->n_local_true; i++) {
441     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
442     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
443     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
444       pc->failedreason = PC_SUBPC_ERROR;
445     }
446   }
447   PetscFunctionReturn(0);
448 }
449 
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: y = sum_i R_i^T K_i^{-1} R_i x
   (additive composition) or the sequential Gauss-Seidel-like variant
   (multiplicative composition over the local blocks).

   The restriction/prolongation scatters may be limited to local values
   (SCATTER_*_LOCAL) depending on osm->type, which is what implements the
   restricted/interpolated ASM variants.

   NOTE(review): in the additive branch the VecScatterBegin/End pairs are
   deliberately split across loops to overlap communication of different
   blocks; the statement order is load-bearing.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    /* Start all restrictions first so their communication overlaps the solves. */
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* Drop the overlap region: keep only non-overlapping values of the solution. */
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs: osm->lx carries the residual correction from the previous block. */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* Form -A_{i+1,union} y_i and stage it in lx as the rhs correction
           for the next block (Gauss-Seidel style sweep). */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
539 
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner: y = sum_i R_i^T K_i^{-T} R_i x, using KSPSolveTranspose()
   for the block solves.

   NOTE(review): the RESTRICT/INTERPOLATE roles of the forward and reverse
   scatter modes are swapped relative to PCApply_ASM() because the transpose
   of (prolongation o solve o restriction) reverses the two scatters.
   The Begin/End pairs are split across loops to overlap communication.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* Start all restrictions so their communication overlaps the solves. */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* Drop the overlap region: keep only non-overlapping values of the solution. */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
588 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* Reset (not destroy) the subdomain solvers so they can be reused by a subsequent PCSetUp() */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* Destroy the per-subdomain submatrices extracted from the preconditioning matrix */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* Scatters and work vectors are sized osm->n_local (max blocks over all processes,
     see the header comment), not n_local_true, so loop over the full n_local */
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      /* localization only exists for the restricted variants of the algorithm */
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  /* Extra objects created only when local solves are composed multiplicatively */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  /* mark the subdomain index sets as gone; n_local_true is intentionally kept */
  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
636 
637 static PetscErrorCode PCDestroy_ASM(PC pc)
638 {
639   PC_ASM         *osm = (PC_ASM*)pc->data;
640   PetscErrorCode ierr;
641   PetscInt       i;
642 
643   PetscFunctionBegin;
644   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
645   if (osm->ksp) {
646     for (i=0; i<osm->n_local_true; i++) {
647       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
648     }
649     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
650   }
651   ierr = PetscFree(pc->data);CHKERRQ(ierr);
652   PetscFunctionReturn(0);
653 }
654 
655 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
656 {
657   PC_ASM         *osm = (PC_ASM*)pc->data;
658   PetscErrorCode ierr;
659   PetscInt       blocks,ovl;
660   PetscBool      symset,flg;
661   PCASMType      asmtype;
662   PCCompositeType loctype;
663   char           sub_mat_type[256];
664 
665   PetscFunctionBegin;
666   /* set the type to symmetric if matrix is symmetric */
667   if (!osm->type_set && pc->pmat) {
668     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
669     if (symset && flg) osm->type = PC_ASM_BASIC;
670   }
671   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
672   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
673   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
674   if (flg) {
675     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
676     osm->dm_subdomains = PETSC_FALSE;
677   }
678   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
679   if (flg) {
680     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
681     osm->dm_subdomains = PETSC_FALSE;
682   }
683   flg  = PETSC_FALSE;
684   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
685   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
686   flg  = PETSC_FALSE;
687   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
688   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
689   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
690   if(flg){
691     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
692   }
693   ierr = PetscOptionsTail();CHKERRQ(ierr);
694   PetscFunctionReturn(0);
695 }
696 
697 /*------------------------------------------------------------------------------------*/
698 
/* Implementation of PCASMSetLocalSubdomains(): records the (possibly overlapping)
   subdomain index sets for this process. Must be called before PCSetUp(). */
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Take references on the incoming IS objects BEFORE destroying the stored
       ones — presumably so the call is safe if the caller passes (some of) the
       very IS objects already stored here */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* only non-overlapping subdomains were supplied: derive osm->is from them */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* zero/negative overlap: osm->is[i] can share the same IS object */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
746 
747 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
748 {
749   PC_ASM         *osm = (PC_ASM*)pc->data;
750   PetscErrorCode ierr;
751   PetscMPIInt    rank,size;
752   PetscInt       n;
753 
754   PetscFunctionBegin;
755   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
756   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
757 
758   /*
759      Split the subdomains equally among all processors
760   */
761   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
762   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
763   n    = N/size + ((N % size) > rank);
764   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
765   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
766   if (!pc->setupcalled) {
767     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
768 
769     osm->n_local_true = n;
770     osm->is           = 0;
771     osm->is_local     = 0;
772   }
773   PetscFunctionReturn(0);
774 }
775 
/* Implementation of PCASMSetOverlap(): records the requested overlap (>= 0).
   Silently ignored after PCSetUp() when the value is unchanged; an attempt to
   change it after setup is an error. */
static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}
786 
/* Implementation of PCASMSetType(): records the restriction/interpolation
   variant; type_set stops PCSetFromOptions_ASM() from overriding the choice
   for symmetric problems. */
static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
796 
/* Implementation of PCASMGetType(): returns the current ASM variant. */
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}
805 
/* Implementation of PCASMSetLocalType(): records how the local solves are
   composed; only additive and multiplicative composition are supported. */
static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}
815 
/* Implementation of PCASMGetLocalType(): returns the local composition type. */
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
824 
/* Implementation of PCASMSetSortIndices(): records whether subdomain index
   sets should be sorted. */
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
833 
834 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
835 {
836   PC_ASM         *osm = (PC_ASM*)pc->data;
837   PetscErrorCode ierr;
838 
839   PetscFunctionBegin;
840   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
841 
842   if (n_local) *n_local = osm->n_local_true;
843   if (first_local) {
844     ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
845     *first_local -= osm->n_local_true;
846   }
847   if (ksp) {
848     /* Assume that local solves are now different; not necessarily
849        true though!  This flag is used only for PCView_ASM() */
850     *ksp                   = osm->ksp;
851     osm->same_local_solves = PETSC_FALSE;
852   }
853   PetscFunctionReturn(0);
854 }
855 
/* Implementation of PCASMGetSubMatType(): returns the matrix type used for
   the subdomain solves (NULL if never set). */
static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}
866 
/* Implementation of PCASMSetSubMatType(): stores a private copy of the
   requested matrix type string, replacing any previous value. */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* free the old string (PetscFree is a no-op on NULL) before copying the new one */
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
878 
879 /*@C
880     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
881 
882     Collective on PC
883 
884     Input Parameters:
885 +   pc - the preconditioner context
886 .   n - the number of subdomains for this processor (default value = 1)
887 .   is - the index set that defines the subdomains for this processor
888          (or NULL for PETSc to determine subdomains)
889 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
890          (or NULL to not provide these)
891 
892     Notes:
893     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
894 
895     By default the ASM preconditioner uses 1 block per processor.
896 
897     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
898 
899     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
900     back to form the global solution (this is the standard restricted additive Schwarz method)
901 
902     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
903     no code to handle that case.
904 
905     Level: advanced
906 
907 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
908 
909 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
910           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
911 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to the method composed in PCCreate_ASM(); a no-op for non-ASM PC types */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
921 
922 /*@C
923     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
924     additive Schwarz preconditioner.  Either all or no processors in the
925     PC communicator must call this routine, with the same index sets.
926 
927     Collective on PC
928 
929     Input Parameters:
930 +   pc - the preconditioner context
931 .   N  - the number of subdomains for all processors
932 .   is - the index sets that define the subdomains for all processors
933          (or NULL to ask PETSc to determine the subdomains)
934 -   is_local - the index sets that define the local part of the subdomains for this processor
935          (or NULL to not provide this information)
936 
937     Options Database Key:
938     To set the total number of subdomain blocks rather than specify the
939     index sets, use the option
940 .    -pc_asm_blocks <blks> - Sets total blocks
941 
942     Notes:
943     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
944 
945     By default the ASM preconditioner uses 1 block per processor.
946 
947     These index sets cannot be destroyed until after completion of the
948     linear solves for which the ASM preconditioner is being used.
949 
950     Use PCASMSetLocalSubdomains() to set local subdomains.
951 
952     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
953 
954     Level: advanced
955 
956 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
957 
958 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
959           PCASMCreateSubdomains2D()
960 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatches to the method composed in PCCreate_ASM(); a no-op for non-ASM PC types */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
970 
971 /*@
972     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
973     additive Schwarz preconditioner.  Either all or no processors in the
974     PC communicator must call this routine.
975 
976     Logically Collective on PC
977 
978     Input Parameters:
979 +   pc  - the preconditioner context
980 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
981 
982     Options Database Key:
983 .   -pc_asm_overlap <ovl> - Sets overlap
984 
985     Notes:
986     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
988     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
989 
990     The overlap defaults to 1, so if one desires that no additional
991     overlap be computed beyond what may have been set with a call to
992     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
993     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
995     internally by PETSc, and using an overlap of 0 would result in an ASM
996     variant that is equivalent to the block Jacobi preconditioner.
997 
998     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
999     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1000 
1001     Note that one can define initial index sets with any overlap via
1002     PCASMSetLocalSubdomains(); the routine
1003     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1004     if desired.
1005 
1006     Level: intermediate
1007 
1008 .keywords: PC, ASM, set, overlap
1009 
1010 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1011           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1012 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatches to the method composed in PCCreate_ASM(); a no-op for non-ASM PC types */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1023 
1024 /*@
1025     PCASMSetType - Sets the type of restriction and interpolation used
1026     for local problems in the additive Schwarz method.
1027 
1028     Logically Collective on PC
1029 
1030     Input Parameters:
1031 +   pc  - the preconditioner context
1032 -   type - variant of ASM, one of
1033 .vb
1034       PC_ASM_BASIC       - full interpolation and restriction
1035       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1036       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1037       PC_ASM_NONE        - local processor restriction and interpolation
1038 .ve
1039 
1040     Options Database Key:
1041 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1042 
1043     Notes: if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1044     to limit the local processor interpolation
1045 
1046     Level: intermediate
1047 
1048 .keywords: PC, ASM, set, type
1049 
1050 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1051           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1052 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatches to the method composed in PCCreate_ASM(); a no-op for non-ASM PC types */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1063 
1064 /*@
1065     PCASMGetType - Gets the type of restriction and interpolation used
1066     for local problems in the additive Schwarz method.
1067 
1068     Logically Collective on PC
1069 
1070     Input Parameter:
1071 .   pc  - the preconditioner context
1072 
1073     Output Parameter:
1074 .   type - variant of ASM, one of
1075 
1076 .vb
1077       PC_ASM_BASIC       - full interpolation and restriction
1078       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1079       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1080       PC_ASM_NONE        - local processor restriction and interpolation
1081 .ve
1082 
1083     Options Database Key:
1084 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1085 
1086     Level: intermediate
1087 
1088 .keywords: PC, ASM, set, type
1089 
1090 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1091           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1092 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc does not provide this method */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1102 
1103 /*@
1104   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1105 
1106   Logically Collective on PC
1107 
1108   Input Parameters:
1109 + pc  - the preconditioner context
1110 - type - type of composition, one of
1111 .vb
1112   PC_COMPOSITE_ADDITIVE       - local additive combination
1113   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1114 .ve
1115 
1116   Options Database Key:
1117 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1118 
1119   Level: intermediate
1120 
1121 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1122 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatches to the method composed in PCCreate_ASM(); a no-op for non-ASM PC types */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1133 
1134 /*@
1135   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1136 
1137   Logically Collective on PC
1138 
1139   Input Parameter:
1140 . pc  - the preconditioner context
1141 
1142   Output Parameter:
1143 . type - type of composition, one of
1144 .vb
1145   PC_COMPOSITE_ADDITIVE       - local additive combination
1146   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1147 .ve
1148 
1149   Options Database Key:
1150 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1151 
1152   Level: intermediate
1153 
1154 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1155 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc does not provide this method */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1166 
1167 /*@
1168     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1169 
1170     Logically Collective on PC
1171 
1172     Input Parameters:
1173 +   pc  - the preconditioner context
1174 -   doSort - sort the subdomain indices
1175 
1176     Level: intermediate
1177 
1178 .keywords: PC, ASM, set, type
1179 
1180 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1181           PCASMCreateSubdomains2D()
1182 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatches to the method composed in PCCreate_ASM(); a no-op for non-ASM PC types */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1193 
1194 /*@C
1195    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1196    this processor.
1197 
1198    Collective on PC iff first_local is requested
1199 
1200    Input Parameter:
1201 .  pc - the preconditioner context
1202 
1203    Output Parameters:
1204 +  n_local - the number of blocks on this processor or NULL
1205 .  first_local - the global number of the first block on this processor or NULL,
1206                  all processors must request or all must pass NULL
1207 -  ksp - the array of KSP contexts
1208 
1209    Note:
1210    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1211 
1212    You must call KSPSetUp() before calling PCASMGetSubKSP().
1213 
1214    Fortran note:
1215    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1216 
1217    Level: advanced
1218 
1219 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1220 
1221 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1222           PCASMCreateSubdomains2D(),
1223 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod errors for non-ASM PC types; collective iff first_local is non-NULL */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1233 
1234 /* -------------------------------------------------------------------------------------*/
1235 /*MC
1236    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1237            its own KSP object.
1238 
1239    Options Database Keys:
1240 +  -pc_asm_blocks <blks> - Sets total blocks
1241 .  -pc_asm_overlap <ovl> - Sets overlap
1242 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
-  -pc_asm_local_type [additive, multiplicative] - Sets local solver composition type, default is additive
1244 
1245      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1246       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1247       -pc_asm_type basic to use the standard ASM.
1248 
1249    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1250      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1251 
1252      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1253         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1254 
1255      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1256          and set the options directly on the resulting KSP object (you can access its PC
1257          with KSPGetPC())
1258 
1259    Level: beginner
1260 
1261    Concepts: additive Schwarz method
1262 
1263     References:
1264 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1265      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1267     Cambridge University Press.
1268 
1269 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1270            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1271            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1272 
1273 M*/
1274 
/* Constructor for PCASM: allocates the PC_ASM context, sets default values,
   installs the function table, and composes the PCASMXxx_C methods that the
   public PCASMSetXxx()/PCASMGetXxx() wrappers dispatch to. */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* defaults: subdomain count decided at setup, overlap of 1, restricted
     additive Schwarz with additive local composition, sorted indices */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1329 
1330 /*@C
1331    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1333 
1334    Collective
1335 
1336    Input Parameters:
1337 +  A - The global matrix operator
1338 -  n - the number of local blocks
1339 
1340    Output Parameters:
1341 .  outis - the array of index sets defining the subdomains
1342 
1343    Level: advanced
1344 
1345    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1346     from these if you use PCASMSetLocalSubdomains()
1347 
1348     In the Fortran version you must provide the array outis[] already allocated of length n.
1349 
1350 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1351 
1352 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1353 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  void            (*f)(void);
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* both ends of the local row range [rstart,rend) must be multiples of bs */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible; f is only queried to test
     whether the operation is implemented for this matrix type */
  ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
  if (f) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  /* isbaij is set when the diagonal block is (S)BAIJ, so the partitioning
     below can work on blocks (compressed rows) instead of scalar rows */
  if (Ad) {
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* CURRENT and SQUARE are trivial partitioners; only proceed with a real one */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; note iia/jja are not freed
           here — ownership passes to adj (released with MatDestroy below) */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        /* ispart/isnumb are valid only when foundpart is true; they are
           consumed and destroyed in the else-branch below */
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* The caller receives ownership of the array of index sets */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* distribute mbs blocks over n subdomains, first (mbs % n) get one extra */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering: sort node indices by their new
       partition-contiguous numbering, so nodes of each partition end up
       adjacent in the indices[] array */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs consecutive scalar indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block: subdomain i owns the slice
       indices[start .. start+count[i]) (copied, so indices can be freed) */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1508 
1509 /*@C
1510    PCASMDestroySubdomains - Destroys the index sets created with
1511    PCASMCreateSubdomains(). Should be called after setting subdomains
1512    with PCASMSetLocalSubdomains().
1513 
1514    Collective
1515 
1516    Input Parameters:
1517 +  n - the number of index sets
1518 .  is - the array of index sets
1519 -  is_local - the array of local index sets, can be NULL
1520 
1521    Level: advanced
1522 
1523 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1524 
1525 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1526 @*/
1527 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1528 {
1529   PetscInt       i;
1530   PetscErrorCode ierr;
1531 
1532   PetscFunctionBegin;
1533   if (n <= 0) PetscFunctionReturn(0);
1534   if (is) {
1535     PetscValidPointer(is,2);
1536     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1537     ierr = PetscFree(is);CHKERRQ(ierr);
1538   }
1539   if (is_local) {
1540     PetscValidPointer(is_local,3);
1541     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1542     ierr = PetscFree(is_local);CHKERRQ(ierr);
1543   }
1544   PetscFunctionReturn(0);
1545 }
1546 
1547 /*@
1548    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1549    preconditioner for a two-dimensional problem on a regular grid.
1550 
1551    Not Collective
1552 
1553    Input Parameters:
1554 +  m, n - the number of mesh points in the x and y directions
1555 .  M, N - the number of subdomains in the x and y directions
1556 .  dof - degrees of freedom per node
1557 -  overlap - overlap in mesh lines
1558 
1559    Output Parameters:
1560 +  Nsub - the number of subdomains created
1561 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1562 -  is_local - array of index sets defining non-overlapping subdomains
1563 
1564    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1566    preconditioners.  More general related routines are
1567    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1568 
1569    Level: advanced
1570 
1571 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1572 
1573 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1574           PCASMSetOverlap()
1575 @*/
1576 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1577 {
1578   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1579   PetscErrorCode ierr;
1580   PetscInt       nidx,*idx,loc,ii,jj,count;
1581 
1582   PetscFunctionBegin;
1583   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1584 
1585   *Nsub     = N*M;
1586   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1587   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1588   ystart    = 0;
1589   loc_outer = 0;
1590   for (i=0; i<N; i++) {
1591     height = n/N + ((n % N) > i); /* height of subdomain */
1592     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1593     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1594     yright = ystart + height + overlap; if (yright > n) yright = n;
1595     xstart = 0;
1596     for (j=0; j<M; j++) {
1597       width = m/M + ((m % M) > j); /* width of subdomain */
1598       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1599       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1600       xright = xstart + width + overlap; if (xright > m) xright = m;
1601       nidx   = (xright - xleft)*(yright - yleft);
1602       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1603       loc    = 0;
1604       for (ii=yleft; ii<yright; ii++) {
1605         count = m*ii + xleft;
1606         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1607       }
1608       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1609       if (overlap == 0) {
1610         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1611 
1612         (*is_local)[loc_outer] = (*is)[loc_outer];
1613       } else {
1614         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1615           for (jj=xstart; jj<xstart+width; jj++) {
1616             idx[loc++] = m*ii + jj;
1617           }
1618         }
1619         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1620       }
1621       ierr    = PetscFree(idx);CHKERRQ(ierr);
1622       xstart += width;
1623       loc_outer++;
1624     }
1625     ystart += height;
1626   }
1627   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1628   PetscFunctionReturn(0);
1629 }
1630 
1631 /*@C
1632     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1633     only) for the additive Schwarz preconditioner.
1634 
1635     Not Collective
1636 
1637     Input Parameter:
1638 .   pc - the preconditioner context
1639 
1640     Output Parameters:
1641 +   n - the number of subdomains for this processor (default value = 1)
1642 .   is - the index sets that define the subdomains for this processor
1643 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1644 
1645 
1646     Notes:
1647     The IS numbering is in the parallel, global numbering of the vector.
1648 
1649     Level: advanced
1650 
1651 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1652 
1653 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1654           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1655 @*/
1656 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1657 {
1658   PC_ASM         *osm = (PC_ASM*)pc->data;
1659   PetscErrorCode ierr;
1660   PetscBool      match;
1661 
1662   PetscFunctionBegin;
1663   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1664   PetscValidIntPointer(n,2);
1665   if (is) PetscValidPointer(is,3);
1666   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1667   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1668   if (n) *n = osm->n_local_true;
1669   if (is) *is = osm->is;
1670   if (is_local) *is_local = osm->is_local;
1671   PetscFunctionReturn(0);
1672 }
1673 
1674 /*@C
1675     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1676     only) for the additive Schwarz preconditioner.
1677 
1678     Not Collective
1679 
1680     Input Parameter:
1681 .   pc - the preconditioner context
1682 
1683     Output Parameters:
1684 +   n - the number of matrices for this processor (default value = 1)
1685 -   mat - the matrices
1686 
1687     Level: advanced
1688 
    Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1690 
1691            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1692 
1693 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1694 
1695 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1696           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1697 @*/
1698 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1699 {
1700   PC_ASM         *osm;
1701   PetscErrorCode ierr;
1702   PetscBool      match;
1703 
1704   PetscFunctionBegin;
1705   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1706   PetscValidIntPointer(n,2);
1707   if (mat) PetscValidPointer(mat,3);
1708   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1709   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1710   if (!match) {
1711     if (n) *n = 0;
1712     if (mat) *mat = NULL;
1713   } else {
1714     osm = (PC_ASM*)pc->data;
1715     if (n) *n = osm->n_local_true;
1716     if (mat) *mat = osm->pmat;
1717   }
1718   PetscFunctionReturn(0);
1719 }
1720 
1721 /*@
1722     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1723 
1724     Logically Collective
1725 
    Input Parameters:
1727 +   pc  - the preconditioner
1728 -   flg - boolean indicating whether to use subdomains defined by the DM
1729 
1730     Options Database Key:
1731 .   -pc_asm_dm_subdomains
1732 
1733     Level: intermediate
1734 
1735     Notes:
1736     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1737     so setting either of the first two effectively turns the latter off.
1738 
1739 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1740 
1741 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1742           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1743 @*/
1744 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1745 {
1746   PC_ASM         *osm = (PC_ASM*)pc->data;
1747   PetscErrorCode ierr;
1748   PetscBool      match;
1749 
1750   PetscFunctionBegin;
1751   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1752   PetscValidLogicalCollectiveBool(pc,flg,2);
1753   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1754   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1755   if (match) {
1756     osm->dm_subdomains = flg;
1757   }
1758   PetscFunctionReturn(0);
1759 }
1760 
1761 /*@
1762     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1763     Not Collective
1764 
1765     Input Parameter:
1766 .   pc  - the preconditioner
1767 
1768     Output Parameter:
1769 .   flg - boolean indicating whether to use subdomains defined by the DM
1770 
1771     Level: intermediate
1772 
1773 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1774 
1775 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1776           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1777 @*/
1778 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1779 {
1780   PC_ASM         *osm = (PC_ASM*)pc->data;
1781   PetscErrorCode ierr;
1782   PetscBool      match;
1783 
1784   PetscFunctionBegin;
1785   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1786   PetscValidPointer(flg,2);
1787   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1788   if (match) {
1789     if (flg) *flg = osm->dm_subdomains;
1790   }
1791   PetscFunctionReturn(0);
1792 }
1793 
1794 /*@
1795      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1796 
1797    Not Collective
1798 
1799    Input Parameter:
1800 .  pc - the PC
1801 
1802    Output Parameter:
.  sub_mat_type - name of matrix type
1804 
1805    Level: advanced
1806 
1807 .keywords: PC, PCASM, MatType, set
1808 
1809 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1810 @*/
1811 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1812   PetscErrorCode ierr;
1813 
1814   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1815   PetscFunctionReturn(0);
1816 }
1817 
1818 /*@
1819      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1820 
1821    Collective on Mat
1822 
1823    Input Parameters:
1824 +  pc             - the PC object
1825 -  sub_mat_type   - matrix type
1826 
1827    Options Database Key:
1828 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1829 
1830    Notes:
1831    See "${PETSC_DIR}/include/petscmat.h" for available types
1832 
1833   Level: advanced
1834 
1835 .keywords: PC, PCASM, MatType, set
1836 
1837 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1838 @*/
1839 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1840 {
1841   PetscErrorCode ierr;
1842 
1843   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1844   PetscFunctionReturn(0);
1845 }
1846