xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 9c22d89a4de876f5bef5a7ea72e4e758119447fc)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
typedef struct {
  PetscInt   n, n_local, n_local_true; /* subdomain counts: global total, max over processes, actual on this process (see file header) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter restriction;         /* mapping from global to overlapping (process) subdomain*/
  VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain */
  VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */
  Vec        lx, ly;              /* work vectors sized by the union of all local overlapping subdomains (lis) */
  Vec        *x,*y;  		  /* per-block work vectors (RHS and solution for each subdomain solve) */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
} PC_ASM;
39 
/*
   PCView_ASM - Prints a summary of the ASM preconditioner (block counts,
   overlap, composition type) and the inner KSP/PC objects to the viewer.

   Collective: all ranks must enter the Get/RestoreSubViewer and synchronized
   print calls, even though only rank 0 prints in the same_local_solves case.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Defaults cover the "not yet configured" cases; overwritten below when known */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share the same solver configuration: rank 0 prints one KSP */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        /* Collective: every rank restores the subviewer */
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Blocks differ: each rank prints its own per-block info in rank order */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line summary for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 static PetscErrorCode PCASMPrintSubdomains(PC pc)
101 {
102   PC_ASM         *osm = (PC_ASM*)pc->data;
103   const char     *prefix;
104   char           fname[PETSC_MAX_PATH_LEN+1];
105   PetscViewer    viewer, sviewer;
106   char           *s;
107   PetscInt       i,j,nidx;
108   const PetscInt *idx;
109   PetscMPIInt    rank, size;
110   PetscErrorCode ierr;
111 
112   PetscFunctionBegin;
113   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
114   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
115   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
116   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
117   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
118   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
119   for (i=0; i<osm->n_local; i++) {
120     if (i < osm->n_local_true) {
121       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
123       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
124       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
126       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
127       for (j=0; j<nidx; j++) {
128         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
129       }
130       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
131       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
132       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
133       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
135       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
137       ierr = PetscFree(s);CHKERRQ(ierr);
138       if (osm->is_local) {
139         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
140         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
141         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
142         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
143         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
144         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
145         for (j=0; j<nidx; j++) {
146           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
147         }
148         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
149         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
150         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
151         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
152         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
153         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscFree(s);CHKERRQ(ierr);
156       }
157     } else {
158       /* Participate in collective viewer calls. */
159       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
160       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
161       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
162       /* Assume either all ranks have is_local or none do. */
163       if (osm->is_local) {
164         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
165         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
166         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
167       }
168     }
169   }
170   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
171   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
172   PetscFunctionReturn(0);
173 }
174 
/*
   PCSetUp_ASM - Builds the ASM data structures: the subdomain index sets,
   the submatrices, one KSP per local block, the work vectors, and the
   restriction/prolongation scatters.

   First call (pc->setupcalled false): determines subdomains (from the DM if
   allowed, otherwise one per process), applies overlap, creates the block
   KSPs, lis (the sorted union of all local overlapping subdomains), and all
   scatters.  Subsequent calls only re-extract the submatrices, rebuilding
   them from scratch when the nonzero pattern changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m;  /* NOTE(review): shadows the outer m declared above; both are only used as ISGetLocalSize() outputs so behavior is unaffected — consider removing the shadow */

    /* Unless the user chose a type, use BASIC for matrices known to be symmetric */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* No overlap: is_local[i] can simply alias is[i] */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        /* Block solvers use the outer prefix plus "sub_" (e.g. -sub_ksp_type) */
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }

    /* lis = sorted, duplicate-free union of all local overlapping subdomains;
       lx/ly are the local work vectors over that union */
    ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
    ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
    ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);

    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    /* lprolongation is only needed for the restricted additive variant */
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);

    /* restriction: global vec (entries of lis) -> contiguous local vec lx */
    ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
    ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
    ierr = ISDestroy(&isl);CHKERRQ(ierr);


    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
      /* translate global indices of is[i] into local positions within lis */
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
      if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isll);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);

        /* idx1: positions of is_local[i] entries within is[i] (source side, y[i]) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);

        /* idx2: positions of is_local[i] entries within lis (destination side, ly) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);

        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);

        ierr = ISDestroy(&isll);CHKERRQ(ierr);
        ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
      }
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* lmats[i]: rows of is[i] x columns of lis, used to update later blocks' RHS */
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
441 
442 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
443 {
444   PC_ASM             *osm = (PC_ASM*)pc->data;
445   PetscErrorCode     ierr;
446   PetscInt           i;
447   KSPConvergedReason reason;
448 
449   PetscFunctionBegin;
450   for (i=0; i<osm->n_local_true; i++) {
451     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
452     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
453     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
454       pc->failedreason = PC_SUBPC_ERROR;
455     }
456   }
457   PetscFunctionReturn(0);
458 }
459 
/*
   PCApply_ASM - Applies the additive (or locally multiplicative) Schwarz
   preconditioner: y = sum_i R_i^T A_i^{-1} R_i x, with the restriction /
   interpolation reduced to local-only scatters per osm->type.

   Sequence per block i: restrict lx -> x[i], solve with ksp[i], accumulate
   y[i] into ly (via lprolongation for restricted-additive, else via the
   transpose of lrestriction); for the multiplicative variant the next
   block's RHS is corrected by the current partial solution first.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){
    /* zero the global and the local solutions */
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else{ /* interpolate the overlapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly (Gauss-Seidel-style correction) */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  }else{
    SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
529 
/*
   PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner.

   Mirrors PCApply_ASM() with the roles of the restriction and interpolation
   scatter modes swapped (see note below); only the additive sweep is
   implemented here — there is no multiplicative RHS correction.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block */
    ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
     ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
     ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    }
    else{ /* interpolate the overlapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
593 
/*
   PCReset_ASM - Frees everything built in PCSetUp_ASM() so the preconditioner can be
   set up again (e.g. with different sizes); the subdomain KSP contexts are reset but
   not destroyed, since they are reused by the next PCSetUp().
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    /* reset (do not destroy) each block solver; PCDestroy_ASM() destroys them */
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    /* submatrices extracted for the block solves */
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->lrestriction) {
    /* the scatters and per-block work vectors exist only after PCSetUp_ASM() has run */
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* local submatrices used only by the multiplicative local composition */
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
639 
640 static PetscErrorCode PCDestroy_ASM(PC pc)
641 {
642   PC_ASM         *osm = (PC_ASM*)pc->data;
643   PetscErrorCode ierr;
644   PetscInt       i;
645 
646   PetscFunctionBegin;
647   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
648   if (osm->ksp) {
649     for (i=0; i<osm->n_local_true; i++) {
650       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
651     }
652     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
653   }
654   ierr = PetscFree(pc->data);CHKERRQ(ierr);
655   PetscFunctionReturn(0);
656 }
657 
658 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
659 {
660   PC_ASM         *osm = (PC_ASM*)pc->data;
661   PetscErrorCode ierr;
662   PetscInt       blocks,ovl;
663   PetscBool      symset,flg;
664   PCASMType      asmtype;
665   PCCompositeType loctype;
666   char           sub_mat_type[256];
667 
668   PetscFunctionBegin;
669   /* set the type to symmetric if matrix is symmetric */
670   if (!osm->type_set && pc->pmat) {
671     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
672     if (symset && flg) osm->type = PC_ASM_BASIC;
673   }
674   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
675   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
676   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
677   if (flg) {
678     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
679     osm->dm_subdomains = PETSC_FALSE;
680   }
681   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
682   if (flg) {
683     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
684     osm->dm_subdomains = PETSC_FALSE;
685   }
686   flg  = PETSC_FALSE;
687   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
688   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
689   flg  = PETSC_FALSE;
690   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
691   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
692   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
693   if(flg){
694     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
695   }
696   ierr = PetscOptionsTail();CHKERRQ(ierr);
697   PetscFunctionReturn(0);
698 }
699 
700 /*------------------------------------------------------------------------------------*/
701 
/*
   PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains(); stores the
   (possibly overlapping) subdomain index sets supplied for this process.

   References on the incoming index sets are taken BEFORE the currently stored ones are
   destroyed, so passing in the very IS objects already held by the PC is safe.
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* take references first so identical arrays survive the destroy below */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* no overlapping sets were given: derive osm->is from the non-overlapping ones */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* no overlap will be added, so osm->is[i] can share the IS (reference only) */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
749 
750 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
751 {
752   PC_ASM         *osm = (PC_ASM*)pc->data;
753   PetscErrorCode ierr;
754   PetscMPIInt    rank,size;
755   PetscInt       n;
756 
757   PetscFunctionBegin;
758   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
759   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
760 
761   /*
762      Split the subdomains equally among all processors
763   */
764   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
765   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
766   n    = N/size + ((N % size) > rank);
767   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
768   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
769   if (!pc->setupcalled) {
770     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
771 
772     osm->n_local_true = n;
773     osm->is           = 0;
774     osm->is_local     = 0;
775   }
776   PetscFunctionReturn(0);
777 }
778 
779 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
780 {
781   PC_ASM *osm = (PC_ASM*)pc->data;
782 
783   PetscFunctionBegin;
784   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
785   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
786   if (!pc->setupcalled) osm->overlap = ovl;
787   PetscFunctionReturn(0);
788 }
789 
790 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
791 {
792   PC_ASM *osm = (PC_ASM*)pc->data;
793 
794   PetscFunctionBegin;
795   osm->type     = type;
796   osm->type_set = PETSC_TRUE;
797   PetscFunctionReturn(0);
798 }
799 
/* PCASMGetType_ASM - Implementation of PCASMGetType(); returns the stored ASM variant. */
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}
808 
809 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
810 {
811   PC_ASM *osm = (PC_ASM *) pc->data;
812 
813   PetscFunctionBegin;
814   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
815   osm->loctype = type;
816   PetscFunctionReturn(0);
817 }
818 
/* PCASMGetLocalType_ASM - Implementation of PCASMGetLocalType(); returns the stored local composition type. */
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
827 
/* PCASMSetSortIndices_ASM - Implementation of PCASMSetSortIndices(); records whether subdomain indices should be sorted. */
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
836 
/*
   PCASMGetSubKSP_ASM - Implementation of PCASMGetSubKSP(); returns the array of block
   solvers created during PCSetUp().

   Note: when first_local is requested this routine is collective because of MPI_Scan().
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* inclusive prefix sum of the block counts; subtracting our own count turns it
       into the global index of this process's first block */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
858 
/* PCASMGetSubMatType_ASM - Implementation of PCASMGetSubMatType(); returns the matrix
   type used for the subdomain solves (NULL when it was never set). */
static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}
869 
/* PCASMSetSubMatType_ASM - Implementation of PCASMSetSubMatType(); stores a private
   copy of the requested matrix type string, releasing any previously stored one first. */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
881 
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method)

    If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
    no code to handle that case.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the type-specific implementation, if any; a no-op for non-ASM PCs */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
924 
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is or is_local.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the type-specific implementation, if any; a no-op for non-ASM PCs */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
973 
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast, but not scalable,
    use the option -mat_increase_overlap_scalable when the problem and number of processes is large.

    Note that one can define initial index sets with any overlap via
    PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatch to the type-specific implementation, if any; a no-op for non-ASM PCs */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1026 
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes: if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
    to limit the local processor interpolation

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatch to the type-specific implementation, if any; a no-op for non-ASM PCs */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1066 
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* query the type-specific implementation; errors if the method has not been composed */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1105 
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the type-specific implementation, if any; a no-op for non-ASM PCs */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1136 
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* query the type-specific implementation; errors if the method has not been composed */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1169 
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatch to the type-specific implementation, if any; a no-op for non-ASM PCs */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1196 
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* query the type-specific implementation; errors if the method has not been composed */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1236 
1237 /* -------------------------------------------------------------------------------------*/
1238 /*MC
1239    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1240            its own KSP object.
1241 
1242    Options Database Keys:
1243 +  -pc_asm_blocks <blks> - Sets total blocks
1244 .  -pc_asm_overlap <ovl> - Sets overlap
1245 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1246 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1247 
1248      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1249       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1250       -pc_asm_type basic to use the standard ASM.
1251 
1252    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1253      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1254 
1255      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1256         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1257 
1258      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1259          and set the options directly on the resulting KSP object (you can access its PC
1260          with KSPGetPC())
1261 
1262    Level: beginner
1263 
1264    Concepts: additive Schwarz method
1265 
1266     References:
1267 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1268      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1270     Cambridge University Press.
1271 
1272 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1273            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1274            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1275 
1276 M*/
1277 
1278 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1279 {
1280   PetscErrorCode ierr;
1281   PC_ASM         *osm;
1282 
1283   PetscFunctionBegin;
1284   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1285 
1286   osm->n                 = PETSC_DECIDE;
1287   osm->n_local           = 0;
1288   osm->n_local_true      = PETSC_DECIDE;
1289   osm->overlap           = 1;
1290   osm->ksp               = 0;
1291   osm->restriction       = 0;
1292   osm->lprolongation     = 0;
1293   osm->lrestriction      = 0;
1294   osm->x                 = 0;
1295   osm->y                 = 0;
1296   osm->is                = 0;
1297   osm->is_local          = 0;
1298   osm->mat               = 0;
1299   osm->pmat              = 0;
1300   osm->type              = PC_ASM_RESTRICT;
1301   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1302   osm->same_local_solves = PETSC_TRUE;
1303   osm->sort_indices      = PETSC_TRUE;
1304   osm->dm_subdomains     = PETSC_FALSE;
1305   osm->sub_mat_type      = NULL;
1306 
1307   pc->data                 = (void*)osm;
1308   pc->ops->apply           = PCApply_ASM;
1309   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1310   pc->ops->setup           = PCSetUp_ASM;
1311   pc->ops->reset           = PCReset_ASM;
1312   pc->ops->destroy         = PCDestroy_ASM;
1313   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1314   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1315   pc->ops->view            = PCView_ASM;
1316   pc->ops->applyrichardson = 0;
1317 
1318   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1319   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1320   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1321   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1322   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1323   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1324   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1325   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1326   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1327   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1328   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1329   PetscFunctionReturn(0);
1330 }
1331 
1332 /*@C
1333    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1334    preconditioner for a any problem on a general grid.
1335 
1336    Collective
1337 
1338    Input Parameters:
1339 +  A - The global matrix operator
1340 -  n - the number of local blocks
1341 
1342    Output Parameters:
1343 .  outis - the array of index sets defining the subdomains
1344 
1345    Level: advanced
1346 
1347    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1348     from these if you use PCASMSetLocalSubdomains()
1349 
1350     In the Fortran version you must provide the array outis[] already allocated of length n.
1351 
1352 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1353 
1354 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1355 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  void            (*f)(void);
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;   /* ispart/isnumb are created only when a partitioner succeeds (foundpart) */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* the local row range must align with the block size, otherwise blocks would be split across subdomains */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible; query the operation first so matrix
     types that do not implement MATOP_GET_DIAGONAL_BLOCK are skipped silently */
  ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
  if (f) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* isbaij controls both the blocked RowIJ traversal below and the bs-expansion of indices later */
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* "current" and "square" are trivial partitioners; only proceed with a real graph partitioner */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0; /* NOTE: shadows the outer i,j on purpose; local to this scope */
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; iia/jja ownership passes to adj, which frees them */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* caller takes ownership of the array (freed by PCASMDestroySubdomains()) */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows; count is computed in whole
       blocks (mbs) then scaled by bs so blocks stay intact */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering: sort identity permutation by partition number
       so indices of each subdomain end up contiguous */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case: expand each block index into bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1510 
1511 /*@C
1512    PCASMDestroySubdomains - Destroys the index sets created with
1513    PCASMCreateSubdomains(). Should be called after setting subdomains
1514    with PCASMSetLocalSubdomains().
1515 
1516    Collective
1517 
1518    Input Parameters:
1519 +  n - the number of index sets
1520 .  is - the array of index sets
1521 -  is_local - the array of local index sets, can be NULL
1522 
1523    Level: advanced
1524 
1525 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1526 
1527 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1528 @*/
1529 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1530 {
1531   PetscInt       i;
1532   PetscErrorCode ierr;
1533 
1534   PetscFunctionBegin;
1535   if (n <= 0) PetscFunctionReturn(0);
1536   if (is) {
1537     PetscValidPointer(is,2);
1538     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1539     ierr = PetscFree(is);CHKERRQ(ierr);
1540   }
1541   if (is_local) {
1542     PetscValidPointer(is_local,3);
1543     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1544     ierr = PetscFree(is_local);CHKERRQ(ierr);
1545   }
1546   PetscFunctionReturn(0);
1547 }
1548 
1549 /*@
1550    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1551    preconditioner for a two-dimensional problem on a regular grid.
1552 
1553    Not Collective
1554 
1555    Input Parameters:
1556 +  m, n - the number of mesh points in the x and y directions
1557 .  M, N - the number of subdomains in the x and y directions
1558 .  dof - degrees of freedom per node
1559 -  overlap - overlap in mesh lines
1560 
1561    Output Parameters:
1562 +  Nsub - the number of subdomains created
1563 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1564 -  is_local - array of index sets defining non-overlapping subdomains
1565 
1566    Note:
1567    Presently PCASMCreateSubdomains2D() is valid only for sequential
1568    preconditioners.  More general related routines are
1569    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1570 
1571    Level: advanced
1572 
1573 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1574 
1575 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1576           PCASMSetOverlap()
1577 @*/
1578 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1579 {
1580   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1581   PetscErrorCode ierr;
1582   PetscInt       nidx,*idx,loc,ii,jj,count;
1583 
1584   PetscFunctionBegin;
1585   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1586 
1587   *Nsub     = N*M;
1588   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1589   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1590   ystart    = 0;
1591   loc_outer = 0;
1592   for (i=0; i<N; i++) {
1593     height = n/N + ((n % N) > i); /* height of subdomain */
1594     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1595     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1596     yright = ystart + height + overlap; if (yright > n) yright = n;
1597     xstart = 0;
1598     for (j=0; j<M; j++) {
1599       width = m/M + ((m % M) > j); /* width of subdomain */
1600       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1601       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1602       xright = xstart + width + overlap; if (xright > m) xright = m;
1603       nidx   = (xright - xleft)*(yright - yleft);
1604       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1605       loc    = 0;
1606       for (ii=yleft; ii<yright; ii++) {
1607         count = m*ii + xleft;
1608         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1609       }
1610       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1611       if (overlap == 0) {
1612         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1613 
1614         (*is_local)[loc_outer] = (*is)[loc_outer];
1615       } else {
1616         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1617           for (jj=xstart; jj<xstart+width; jj++) {
1618             idx[loc++] = m*ii + jj;
1619           }
1620         }
1621         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1622       }
1623       ierr    = PetscFree(idx);CHKERRQ(ierr);
1624       xstart += width;
1625       loc_outer++;
1626     }
1627     ystart += height;
1628   }
1629   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1630   PetscFunctionReturn(0);
1631 }
1632 
1633 /*@C
1634     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1635     only) for the additive Schwarz preconditioner.
1636 
1637     Not Collective
1638 
1639     Input Parameter:
1640 .   pc - the preconditioner context
1641 
1642     Output Parameters:
1643 +   n - the number of subdomains for this processor (default value = 1)
1644 .   is - the index sets that define the subdomains for this processor
1645 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1646 
1647 
1648     Notes:
1649     The IS numbering is in the parallel, global numbering of the vector.
1650 
1651     Level: advanced
1652 
1653 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1654 
1655 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1656           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1657 @*/
1658 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1659 {
1660   PC_ASM         *osm = (PC_ASM*)pc->data;
1661   PetscErrorCode ierr;
1662   PetscBool      match;
1663 
1664   PetscFunctionBegin;
1665   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1666   PetscValidIntPointer(n,2);
1667   if (is) PetscValidPointer(is,3);
1668   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1669   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1670   if (n) *n = osm->n_local_true;
1671   if (is) *is = osm->is;
1672   if (is_local) *is_local = osm->is_local;
1673   PetscFunctionReturn(0);
1674 }
1675 
1676 /*@C
1677     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1678     only) for the additive Schwarz preconditioner.
1679 
1680     Not Collective
1681 
1682     Input Parameter:
1683 .   pc - the preconditioner context
1684 
1685     Output Parameters:
1686 +   n - the number of matrices for this processor (default value = 1)
1687 -   mat - the matrices
1688 
1689     Level: advanced
1690 
1691     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1692 
1693            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1694 
1695 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1696 
1697 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1698           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1699 @*/
1700 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1701 {
1702   PC_ASM         *osm;
1703   PetscErrorCode ierr;
1704   PetscBool      match;
1705 
1706   PetscFunctionBegin;
1707   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1708   PetscValidIntPointer(n,2);
1709   if (mat) PetscValidPointer(mat,3);
1710   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1711   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1712   if (!match) {
1713     if (n) *n = 0;
1714     if (mat) *mat = NULL;
1715   } else {
1716     osm = (PC_ASM*)pc->data;
1717     if (n) *n = osm->n_local_true;
1718     if (mat) *mat = osm->pmat;
1719   }
1720   PetscFunctionReturn(0);
1721 }
1722 
1723 /*@
1724     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1725 
1726     Logically Collective
1727 
1728     Input Parameter:
1729 +   pc  - the preconditioner
1730 -   flg - boolean indicating whether to use subdomains defined by the DM
1731 
1732     Options Database Key:
1733 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1734 
1735     Level: intermediate
1736 
1737     Notes:
1738     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1739     so setting either of the first two effectively turns the latter off.
1740 
1741 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1742 
1743 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1744           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1745 @*/
1746 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1747 {
1748   PC_ASM         *osm = (PC_ASM*)pc->data;
1749   PetscErrorCode ierr;
1750   PetscBool      match;
1751 
1752   PetscFunctionBegin;
1753   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1754   PetscValidLogicalCollectiveBool(pc,flg,2);
1755   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1756   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1757   if (match) {
1758     osm->dm_subdomains = flg;
1759   }
1760   PetscFunctionReturn(0);
1761 }
1762 
1763 /*@
1764     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1765     Not Collective
1766 
1767     Input Parameter:
1768 .   pc  - the preconditioner
1769 
1770     Output Parameter:
1771 .   flg - boolean indicating whether to use subdomains defined by the DM
1772 
1773     Level: intermediate
1774 
1775 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1776 
1777 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1778           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1779 @*/
1780 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1781 {
1782   PC_ASM         *osm = (PC_ASM*)pc->data;
1783   PetscErrorCode ierr;
1784   PetscBool      match;
1785 
1786   PetscFunctionBegin;
1787   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1788   PetscValidPointer(flg,2);
1789   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1790   if (match) {
1791     if (flg) *flg = osm->dm_subdomains;
1792   }
1793   PetscFunctionReturn(0);
1794 }
1795 
1796 /*@
1797      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1798 
1799    Not Collective
1800 
1801    Input Parameter:
1802 .  pc - the PC
1803 
1804    Output Parameter:
1805 .  sub_mat_type - name of matrix type
1806 
1807    Level: advanced
1808 
1809 .keywords: PC, PCASM, MatType, set
1810 
1811 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1812 @*/
1813 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1814   PetscErrorCode ierr;
1815 
1816   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1817   PetscFunctionReturn(0);
1818 }
1819 
1820 /*@
1821      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1822 
1823    Collective on Mat
1824 
1825    Input Parameters:
1826 +  pc             - the PC object
1827 -  sub_mat_type   - matrix type
1828 
1829    Options Database Key:
1830 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1831 
1832    Notes:
1833    See "${PETSC_DIR}/include/petscmat.h" for available types
1834 
1835   Level: advanced
1836 
1837 .keywords: PC, PCASM, MatType, set
1838 
1839 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1840 @*/
1841 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1842 {
1843   PetscErrorCode ierr;
1844 
1845   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1846   PetscFunctionReturn(0);
1847 }
1848