xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 8d76b567012869fc44a72bfbee2ffc0c796c6d4b)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
13 #include <petscdm.h>
14 
/*
   Private context for the PCASM (additive Schwarz) preconditioner.
   One instance lives in pc->data; see the file header for the meaning of
   n, n_local and n_local_true.  Arrays below are indexed by local subdomain
   (length n_local_true unless noted otherwise).
*/
typedef struct {
  PetscInt   n, n_local, n_local_true; /* global, padded-local and true-local subdomain counts (see file header) */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter restriction;         /* mapping from global to overlapping (process) subdomain*/
  VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain */
  VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */
  Vec        lx, ly;              /* work vectors */
  Vec        *x,*y;               /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
39 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
40 {
41   PC_ASM         *osm = (PC_ASM*)pc->data;
42   PetscErrorCode ierr;
43   PetscMPIInt    rank;
44   PetscInt       i,bsz;
45   PetscBool      iascii,isstring;
46   PetscViewer    sviewer;
47 
48   PetscFunctionBegin;
49   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
50   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
51   if (iascii) {
52     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
53     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
54     if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
55     ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
56     ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
57     if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
58     if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
59     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
60     if (osm->same_local_solves) {
61       if (osm->ksp) {
62         ierr = PetscViewerASCIIPrintf(viewer,"  Local solver is the same for all blocks, as in the following KSP and PC objects on rank 0:\n");CHKERRQ(ierr);
63         ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
64         if (!rank) {
65           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
66           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
67           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
68         }
69         ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
70       }
71     } else {
72       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
73       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
74       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
75       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
76       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
77       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
78       ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
79       for (i=0; i<osm->n_local_true; i++) {
80         ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
81         ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
82         ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
83         ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
84       }
85       ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
86       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
87       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
88       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
89     }
90   } else if (isstring) {
91     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
92     ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
93     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
94     ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
95   }
96   PetscFunctionReturn(0);
97 }
98 
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer, sviewer;
105   char           *s;
106   PetscInt       i,j,nidx;
107   const PetscInt *idx;
108   PetscMPIInt    rank, size;
109   PetscErrorCode ierr;
110 
111   PetscFunctionBegin;
112   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
113   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
114   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
115   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
116   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
117   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
118   for (i=0; i<osm->n_local; i++) {
119     if (i < osm->n_local_true) {
120       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
121       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
122       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
123 #define len  16*(nidx+1)+512
124       ierr = PetscMalloc1(len,&s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
126 #undef len
127       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
128       for (j=0; j<nidx; j++) {
129         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
130       }
131       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
132       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
133       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
136       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
137       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
138       ierr = PetscFree(s);CHKERRQ(ierr);
139       if (osm->is_local) {
140         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
141 #define len  16*(nidx+1)+512
142         ierr = PetscMalloc1(len, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
144 #undef len
145         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
146         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
147         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
148         for (j=0; j<nidx; j++) {
149           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
150         }
151         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
152         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
153         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
156         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
157         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
158         ierr = PetscFree(s);CHKERRQ(ierr);
159       }
160     } else {
161       /* Participate in collective viewer calls. */
162       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
164       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
165       /* Assume either all ranks have is_local or none do. */
166       if (osm->is_local) {
167         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
169         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
170       }
171     }
172   }
173   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
174   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
175   PetscFunctionReturn(0);
176 }
177 
178 static PetscErrorCode PCSetUp_ASM(PC pc)
179 {
180   PC_ASM         *osm = (PC_ASM*)pc->data;
181   PetscErrorCode ierr;
182   PetscBool      flg;
183   PetscInt       i,m,m_local;
184   MatReuse       scall = MAT_REUSE_MATRIX;
185   IS             isl;
186   KSP            ksp;
187   PC             subpc;
188   const char     *prefix,*pprefix;
189   Vec            vec;
190   DM             *domain_dm = NULL;
191 
192   PetscFunctionBegin;
193   if (!pc->setupcalled) {
194     PetscInt m;
195 
196     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
197     if (osm->n_local_true == PETSC_DECIDE) {
198       /* no subdomains given */
199       /* try pc->dm first, if allowed */
200       if (osm->dm_subdomains && pc->dm) {
201         PetscInt  num_domains, d;
202         char      **domain_names;
203         IS        *inner_domain_is, *outer_domain_is;
204         ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
205         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
206                               A future improvement of this code might allow one to use
207                               DM-defined subdomains and also increase the overlap,
208                               but that is not currently supported */
209         if (num_domains) {
210           ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
211         }
212         for (d = 0; d < num_domains; ++d) {
213           if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
214           if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
215           if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
216         }
217         ierr = PetscFree(domain_names);CHKERRQ(ierr);
218         ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
219         ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
220       }
221       if (osm->n_local_true == PETSC_DECIDE) {
222         /* still no subdomains; use one subdomain per processor */
223         osm->n_local_true = 1;
224       }
225     }
226     { /* determine the global and max number of subdomains */
227       struct {PetscInt max,sum;} inwork,outwork;
228       PetscMPIInt size;
229 
230       inwork.max   = osm->n_local_true;
231       inwork.sum   = osm->n_local_true;
232       ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
233       osm->n_local = outwork.max;
234       osm->n       = outwork.sum;
235 
236       ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
237       if (outwork.max == 1 && outwork.sum == size) {
238         /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
239         ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
240       }
241     }
242     if (!osm->is) { /* create the index sets */
243       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
244     }
245     if (osm->n_local_true > 1 && !osm->is_local) {
246       ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
247       for (i=0; i<osm->n_local_true; i++) {
248         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
249           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
250           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
251         } else {
252           ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
253           osm->is_local[i] = osm->is[i];
254         }
255       }
256     }
257     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
258     flg  = PETSC_FALSE;
259     ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
260     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
261 
262     if (osm->overlap > 0) {
263       /* Extend the "overlapping" regions by a number of steps */
264       ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
265     }
266     if (osm->sort_indices) {
267       for (i=0; i<osm->n_local_true; i++) {
268         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
269         if (osm->is_local) {
270           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
271         }
272       }
273     }
274 
275     if (!osm->ksp) {
276       /* Create the local solvers */
277       ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
278       if (domain_dm) {
279         ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
280       }
281       for (i=0; i<osm->n_local_true; i++) {
282         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
283         ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
284         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
285         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
286         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
287         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
288         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
289         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
290         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
291         if (domain_dm) {
292           ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
293           ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
294           ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
295         }
296         osm->ksp[i] = ksp;
297       }
298       if (domain_dm) {
299         ierr = PetscFree(domain_dm);CHKERRQ(ierr);
300       }
301     }
302 
303     ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
304     ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
305     ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
306     ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
307     ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
308 
309     scall = MAT_INITIAL_MATRIX;
310   } else {
311     /*
312        Destroy the blocks from the previous iteration
313     */
314     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
315       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
316       scall = MAT_INITIAL_MATRIX;
317     }
318   }
319 
320   /*
321      Extract out the submatrices
322   */
323   ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
324   if (scall == MAT_INITIAL_MATRIX) {
325     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
326     for (i=0; i<osm->n_local_true; i++) {
327       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
328       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
329     }
330   }
331 
332   /* Convert the types of the submatrices (if needbe) */
333   if (osm->sub_mat_type) {
334     for (i=0; i<osm->n_local_true; i++) {
335       ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
336     }
337   }
338 
339   if(!pc->setupcalled){
340     /* Create the local work vectors (from the local matrices) and scatter contexts */
341     ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
342 
343     if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
344     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
345       ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
346     }
347     ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
348     ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
349     ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);
350 
351     ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
352     ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
353     ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
354     ierr = ISDestroy(&isl);CHKERRQ(ierr);
355 
356 
357     for (i=0; i<osm->n_local_true; ++i) {
358       ISLocalToGlobalMapping ltog;
359       IS                     isll;
360       const PetscInt         *idx_is;
361       PetscInt               *idx_lis,nout;
362 
363       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
364       ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
365       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
366 
367       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
368       ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
369       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
370       ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
371       ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
372       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
373       if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
374       ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
375       ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
376       ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
377       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
378       ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
379       ierr = ISDestroy(&isll);CHKERRQ(ierr);
380       ierr = ISDestroy(&isl);CHKERRQ(ierr);
381       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overalapping is_local[i] entries */
382         ISLocalToGlobalMapping ltog;
383         IS                     isll,isll_local;
384         const PetscInt         *idx_local;
385         PetscInt               *idx1, *idx2, nout;
386 
387         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
388         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
389 
390         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
391         ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
392         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
393         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
394         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
395         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
396 
397         ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
398         ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
399         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
400         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
401         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
402         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);
403 
404         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
405         ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);
406 
407         ierr = ISDestroy(&isll);CHKERRQ(ierr);
408         ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
409       }
410     }
411     ierr = VecDestroy(&vec);CHKERRQ(ierr);
412   }
413 
414   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
415     IS      *cis;
416     PetscInt c;
417 
418     ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
419     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
420     ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
421     ierr = PetscFree(cis);CHKERRQ(ierr);
422   }
423 
424   /* Return control to the user so that the submatrices can be modified (e.g., to apply
425      different boundary conditions for the submatrices than for the global problem) */
426   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
427 
428   /*
429      Loop over subdomains putting them into local ksp
430   */
431   for (i=0; i<osm->n_local_true; i++) {
432     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
433     if (!pc->setupcalled) {
434       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
435     }
436   }
437   PetscFunctionReturn(0);
438 }
439 
440 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
441 {
442   PC_ASM             *osm = (PC_ASM*)pc->data;
443   PetscErrorCode     ierr;
444   PetscInt           i;
445   KSPConvergedReason reason;
446 
447   PetscFunctionBegin;
448   for (i=0; i<osm->n_local_true; i++) {
449     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
450     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
451     if (reason == KSP_DIVERGED_PC_FAILED) {
452       pc->failedreason = PC_SUBPC_ERROR;
453     }
454   }
455   PetscFunctionReturn(0);
456 }
457 
/*
   PCApply_ASM - Applies the Schwarz preconditioner: scatters the global RHS x
   into the process-local overlapping region lx, restricts to each block RHS
   x[i], solves each block with its own sequential KSP, accumulates the block
   solutions into ly (through lprolongation for restricted-additive, otherwise
   through the reverse of lrestriction), and finally adds ly back into the
   global result y.  For PC_COMPOSITE_MULTIPLICATIVE, each later block's RHS is
   updated with the already-computed local solution via lmats before its solve.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){
    /* zero the global and the local solutions */
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    /* NOTE(review): lrestriction[0]/x[0] are accessed unconditionally, which
       assumes n_local_true >= 1 on every rank -- confirm ranks cannot end up
       with zero subdomains here */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else{ /* interpolate the overalapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1] * ly (Gauss-Seidel-style sweep over blocks) */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  }else{
    SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
530 
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner: same scatter structure as PCApply_ASM() but each block is
   solved with KSPSolveTranspose(), and the restrict/interpolate scatter-mode
   roles are swapped (see note below).  Only the additive composition path is
   implemented here; there is no multiplicative RHS update.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  /* NOTE(review): as in PCApply_ASM(), this assumes n_local_true >= 1 on every
     rank -- confirm ranks cannot have zero subdomains */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block */
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution */
     ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
     ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    }
    else{ /* interpolate the overalapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
597 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* Release everything created during setup so the PC can be set up again;
     the subdomain KSPs are only reset (not destroyed) to preserve their options */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* Destroy the extracted overlapping subdomain matrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* osm->lrestriction doubles as the marker that the scatters and per-block work
     vectors exist; NOTE(review): presumably they are all created together in
     PCSetUp_ASM(), which is outside this view -- confirm before reordering */
  if (osm->lrestriction) {
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      /* lprolongation is only allocated for the restricted variants, so guard each access */
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  /* lmats are only created for the local multiplicative composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
643 
644 static PetscErrorCode PCDestroy_ASM(PC pc)
645 {
646   PC_ASM         *osm = (PC_ASM*)pc->data;
647   PetscErrorCode ierr;
648   PetscInt       i;
649 
650   PetscFunctionBegin;
651   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
652   if (osm->ksp) {
653     for (i=0; i<osm->n_local_true; i++) {
654       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
655     }
656     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
657   }
658   ierr = PetscFree(pc->data);CHKERRQ(ierr);
659   PetscFunctionReturn(0);
660 }
661 
662 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
663 {
664   PC_ASM         *osm = (PC_ASM*)pc->data;
665   PetscErrorCode ierr;
666   PetscInt       blocks,ovl;
667   PetscBool      flg;
668   PCASMType      asmtype;
669   PCCompositeType loctype;
670   char           sub_mat_type[256];
671 
672   PetscFunctionBegin;
673   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
674   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
675   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
676   if (flg) {
677     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
678     osm->dm_subdomains = PETSC_FALSE;
679   }
680   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
681   if (flg) {
682     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
683     osm->dm_subdomains = PETSC_FALSE;
684   }
685   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
686   if (flg) {
687     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
688     osm->dm_subdomains = PETSC_FALSE;
689   }
690   flg  = PETSC_FALSE;
691   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
692   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
693   flg  = PETSC_FALSE;
694   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
695   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
696   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
697   if(flg){
698     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
699   }
700   ierr = PetscOptionsTail();CHKERRQ(ierr);
701   PetscFunctionReturn(0);
702 }
703 
704 /*------------------------------------------------------------------------------------*/
705 
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Take references on the incoming index sets BEFORE destroying the stored ones,
       so passing back (some of) the already-stored sets is safe */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* No overlapping sets were supplied: derive osm->is from the non-overlapping ones */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            /* make a private deep copy so enlarging osm->is[i] later cannot touch is_local[i] */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* no overlap will be added, so the two arrays can share the same IS */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
753 
754 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
755 {
756   PC_ASM         *osm = (PC_ASM*)pc->data;
757   PetscErrorCode ierr;
758   PetscMPIInt    rank,size;
759   PetscInt       n;
760 
761   PetscFunctionBegin;
762   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
763   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
764 
765   /*
766      Split the subdomains equally among all processors
767   */
768   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
769   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
770   n    = N/size + ((N % size) > rank);
771   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
772   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
773   if (!pc->setupcalled) {
774     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
775 
776     osm->n_local_true = n;
777     osm->is           = 0;
778     osm->is_local     = 0;
779   }
780   PetscFunctionReturn(0);
781 }
782 
783 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
784 {
785   PC_ASM *osm = (PC_ASM*)pc->data;
786 
787   PetscFunctionBegin;
788   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
789   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
790   if (!pc->setupcalled) osm->overlap = ovl;
791   PetscFunctionReturn(0);
792 }
793 
static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  /* Record the restriction/interpolation variant and remember the user chose it
     explicitly, so PCASM will not change it later (cf. the type_set field) */
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
803 
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  /* Report the currently selected restriction/interpolation variant */
  *type = osm->type;
  PetscFunctionReturn(0);
}
812 
813 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
814 {
815   PC_ASM *osm = (PC_ASM *) pc->data;
816 
817   PetscFunctionBegin;
818   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
819   osm->loctype = type;
820   PetscFunctionReturn(0);
821 }
822 
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  /* Report how the local subdomain solves are composed (additive or multiplicative) */
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
831 
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  /* Record whether the subdomain index sets should be sorted during setup */
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
840 
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* n_local_true < 1 means setup has not run yet, so the subdomain KSPs do not exist */
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* Global index of this process's first block: inclusive prefix sum minus own count.
       MPI_Scan is collective, so all ranks must request first_local or none may */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
862 
static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  /* Returns the stored type string directly (NULL if never set); no copy is made */
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}
873 
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Free any previously stored type string, then keep a private copy of the new one */
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
885 
886 /*@C
887     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
888 
889     Collective on pc
890 
891     Input Parameters:
892 +   pc - the preconditioner context
893 .   n - the number of subdomains for this processor (default value = 1)
894 .   is - the index set that defines the subdomains for this processor
895          (or NULL for PETSc to determine subdomains)
896 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
897          (or NULL to not provide these)
898 
899     Options Database Key:
900     To set the total number of subdomain blocks rather than specify the
901     index sets, use the option
902 .    -pc_asm_local_blocks <blks> - Sets local blocks
903 
904     Notes:
905     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
906 
907     By default the ASM preconditioner uses 1 block per processor.
908 
909     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
910 
911     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
912     back to form the global solution (this is the standard restricted additive Schwarz method)
913 
914     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
915     no code to handle that case.
916 
917     Level: advanced
918 
919 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
920           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
921 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the implementation composed in PCCreate_ASM(); PetscTryMethod
     is a no-op when pc is not of a type that composed this method */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
931 
932 /*@C
933     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
934     additive Schwarz preconditioner.  Either all or no processors in the
935     PC communicator must call this routine, with the same index sets.
936 
937     Collective on pc
938 
939     Input Parameters:
940 +   pc - the preconditioner context
941 .   N  - the number of subdomains for all processors
942 .   is - the index sets that define the subdomains for all processors
943          (or NULL to ask PETSc to determine the subdomains)
944 -   is_local - the index sets that define the local part of the subdomains for this processor
945          (or NULL to not provide this information)
946 
947     Options Database Key:
948     To set the total number of subdomain blocks rather than specify the
949     index sets, use the option
950 .    -pc_asm_blocks <blks> - Sets total blocks
951 
952     Notes:
953     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
954 
955     By default the ASM preconditioner uses 1 block per processor.
956 
957     These index sets cannot be destroyed until after completion of the
958     linear solves for which the ASM preconditioner is being used.
959 
960     Use PCASMSetLocalSubdomains() to set local subdomains.
961 
962     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
963 
964     Level: advanced
965 
966 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
967           PCASMCreateSubdomains2D()
968 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the composed implementation; silently ignored for non-PCASM types */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
978 
979 /*@
980     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
981     additive Schwarz preconditioner.  Either all or no processors in the
982     PC communicator must call this routine.
983 
984     Logically Collective on pc
985 
986     Input Parameters:
987 +   pc  - the preconditioner context
988 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
989 
990     Options Database Key:
991 .   -pc_asm_overlap <ovl> - Sets overlap
992 
993     Notes:
994     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
996     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
997 
998     The overlap defaults to 1, so if one desires that no additional
999     overlap be computed beyond what may have been set with a call to
1000     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1001     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
1003     internally by PETSc, and using an overlap of 0 would result in an ASM
1004     variant that is equivalent to the block Jacobi preconditioner.
1005 
1006     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1007     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1008 
1009     Note that one can define initial index sets with any overlap via
1010     PCASMSetLocalSubdomains(); the routine
1011     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1012     if desired.
1013 
1014     Level: intermediate
1015 
1016 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1017           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1018 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatch to the composed implementation; silently ignored for non-PCASM types */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1029 
1030 /*@
1031     PCASMSetType - Sets the type of restriction and interpolation used
1032     for local problems in the additive Schwarz method.
1033 
1034     Logically Collective on pc
1035 
1036     Input Parameters:
1037 +   pc  - the preconditioner context
1038 -   type - variant of ASM, one of
1039 .vb
1040       PC_ASM_BASIC       - full interpolation and restriction
1041       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1042       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1043       PC_ASM_NONE        - local processor restriction and interpolation
1044 .ve
1045 
1046     Options Database Key:
1047 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1048 
1049     Notes:
1050     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1051     to limit the local processor interpolation
1052 
1053     Level: intermediate
1054 
1055 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1056           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1057 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatch to the composed implementation; silently ignored for non-PCASM types */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1068 
1069 /*@
1070     PCASMGetType - Gets the type of restriction and interpolation used
1071     for local problems in the additive Schwarz method.
1072 
1073     Logically Collective on pc
1074 
1075     Input Parameter:
1076 .   pc  - the preconditioner context
1077 
1078     Output Parameter:
1079 .   type - variant of ASM, one of
1080 
1081 .vb
1082       PC_ASM_BASIC       - full interpolation and restriction
1083       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1084       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1085       PC_ASM_NONE        - local processor restriction and interpolation
1086 .ve
1087 
1088     Options Database Key:
1089 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1090 
1091     Level: intermediate
1092 
1093 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1094           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1095 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc did not compose this method */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1105 
1106 /*@
1107   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1108 
1109   Logically Collective on pc
1110 
1111   Input Parameters:
1112 + pc  - the preconditioner context
1113 - type - type of composition, one of
1114 .vb
1115   PC_COMPOSITE_ADDITIVE       - local additive combination
1116   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1117 .ve
1118 
1119   Options Database Key:
1120 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1121 
1122   Level: intermediate
1123 
1124 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1125 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Dispatch to the composed implementation; silently ignored for non-PCASM types */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1136 
1137 /*@
1138   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1139 
1140   Logically Collective on pc
1141 
1142   Input Parameter:
1143 . pc  - the preconditioner context
1144 
1145   Output Parameter:
1146 . type - type of composition, one of
1147 .vb
1148   PC_COMPOSITE_ADDITIVE       - local additive combination
1149   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1150 .ve
1151 
1152   Options Database Key:
1153 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1154 
1155   Level: intermediate
1156 
1157 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1158 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc did not compose this method */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1169 
1170 /*@
1171     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1172 
1173     Logically Collective on pc
1174 
1175     Input Parameters:
1176 +   pc  - the preconditioner context
1177 -   doSort - sort the subdomain indices
1178 
1179     Level: intermediate
1180 
1181 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1182           PCASMCreateSubdomains2D()
1183 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatch to the composed implementation; silently ignored for non-PCASM types */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1194 
1195 /*@C
1196    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1197    this processor.
1198 
1199    Collective on pc iff first_local is requested
1200 
1201    Input Parameter:
1202 .  pc - the preconditioner context
1203 
1204    Output Parameters:
1205 +  n_local - the number of blocks on this processor or NULL
1206 .  first_local - the global number of the first block on this processor or NULL,
1207                  all processors must request or all must pass NULL
1208 -  ksp - the array of KSP contexts
1209 
1210    Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed by the caller.
1212 
1213    You must call KSPSetUp() before calling PCASMGetSubKSP().
1214 
1215    Fortran note:
1216    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1217 
1218    Level: advanced
1219 
1220 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1221           PCASMCreateSubdomains2D(),
1222 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod errors for non-PCASM types; the implementation is collective
     iff first_local is requested (it performs an MPI_Scan) */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1232 
1233 /* -------------------------------------------------------------------------------------*/
1234 /*MC
1235    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1236            its own KSP object.
1237 
1238    Options Database Keys:
1239 +  -pc_asm_blocks <blks> - Sets total blocks
1240 .  -pc_asm_overlap <ovl> - Sets overlap
1241 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
-  -pc_asm_local_type [additive, multiplicative] - Sets the type of local solver composition, default is additive
1243 
1244      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1245       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1246       -pc_asm_type basic to use the standard ASM.
1247 
1248    Notes:
1249     Each processor can have one or more blocks, but a block cannot be shared by more
1250      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1251 
1252      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1253         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1254 
1255      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1256          and set the options directly on the resulting KSP object (you can access its PC
1257          with KSPGetPC())
1258 
1259    Level: beginner
1260 
1261     References:
1262 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1263      Courant Institute, New York University Technical report
1264 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1265     Cambridge University Press.
1266 
1267 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1268            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1269            PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1270 
1271 M*/
1272 
1273 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1274 {
1275   PetscErrorCode ierr;
1276   PC_ASM         *osm;
1277 
1278   PetscFunctionBegin;
1279   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1280 
1281   osm->n                 = PETSC_DECIDE;
1282   osm->n_local           = 0;
1283   osm->n_local_true      = PETSC_DECIDE;
1284   osm->overlap           = 1;
1285   osm->ksp               = 0;
1286   osm->restriction       = 0;
1287   osm->lprolongation     = 0;
1288   osm->lrestriction      = 0;
1289   osm->x                 = 0;
1290   osm->y                 = 0;
1291   osm->is                = 0;
1292   osm->is_local          = 0;
1293   osm->mat               = 0;
1294   osm->pmat              = 0;
1295   osm->type              = PC_ASM_RESTRICT;
1296   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1297   osm->same_local_solves = PETSC_TRUE;
1298   osm->sort_indices      = PETSC_TRUE;
1299   osm->dm_subdomains     = PETSC_FALSE;
1300   osm->sub_mat_type      = NULL;
1301 
1302   pc->data                 = (void*)osm;
1303   pc->ops->apply           = PCApply_ASM;
1304   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1305   pc->ops->setup           = PCSetUp_ASM;
1306   pc->ops->reset           = PCReset_ASM;
1307   pc->ops->destroy         = PCDestroy_ASM;
1308   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1309   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1310   pc->ops->view            = PCView_ASM;
1311   pc->ops->applyrichardson = 0;
1312 
1313   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1314   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1315   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1316   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1317   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1318   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1319   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1320   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1321   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1322   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1323   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1324   PetscFunctionReturn(0);
1325 }
1326 
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* The local ownership range must be block-aligned, otherwise splitting by blocks below is meaningless */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* isbaij tells us whether the (i,j) structure is per-block rather than per-point */
    ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  /* With more than one block and the diagonal block available, try a graph-partitioning based split */
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* "current" and "square" are trivial partitioners; only proceed if something better was selected */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE(review): iia/jja are not freed here — MatCreateMPIAdj() is expected to take
           ownership of the arrays and free them when adj is destroyed; confirm against its docs */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* each subdomain gets mbs/n blocks; the first mbs%n subdomains get one extra block */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    /* sort local nodes by their new (partition) number so each subdomain's nodes are contiguous in indices[] */
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case: expand each block index into its bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1502 
1503 /*@C
1504    PCASMDestroySubdomains - Destroys the index sets created with
1505    PCASMCreateSubdomains(). Should be called after setting subdomains
1506    with PCASMSetLocalSubdomains().
1507 
1508    Collective
1509 
1510    Input Parameters:
1511 +  n - the number of index sets
1512 .  is - the array of index sets
1513 -  is_local - the array of local index sets, can be NULL
1514 
1515    Level: advanced
1516 
1517 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1518 @*/
1519 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1520 {
1521   PetscInt       i;
1522   PetscErrorCode ierr;
1523 
1524   PetscFunctionBegin;
1525   if (n <= 0) PetscFunctionReturn(0);
1526   if (is) {
1527     PetscValidPointer(is,2);
1528     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1529     ierr = PetscFree(is);CHKERRQ(ierr);
1530   }
1531   if (is_local) {
1532     PetscValidPointer(is_local,3);
1533     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1534     ierr = PetscFree(is_local);CHKERRQ(ierr);
1535   }
1536   PetscFunctionReturn(0);
1537 }
1538 
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* Only dof == 1 is implemented */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  /* Sweep subdomains row by row (i over y-direction, j over x-direction) */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend by overlap, clipped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* extend by overlap, clipped to the mesh boundary */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* enumerate the (row-major) mesh points of the overlapping rectangle */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* overlapping and non-overlapping subdomains coincide; share one IS via reference counting */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* rebuild idx (reusing the buffer) with only the interior, non-overlapping rectangle */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1620 
1621 /*@C
1622     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1623     only) for the additive Schwarz preconditioner.
1624 
1625     Not Collective
1626 
1627     Input Parameter:
1628 .   pc - the preconditioner context
1629 
1630     Output Parameters:
1631 +   n - the number of subdomains for this processor (default value = 1)
1632 .   is - the index sets that define the subdomains for this processor
1633 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1634 
1635 
1636     Notes:
1637     The IS numbering is in the parallel, global numbering of the vector.
1638 
1639     Level: advanced
1640 
1641 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1642           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1643 @*/
1644 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1645 {
1646   PC_ASM         *osm = (PC_ASM*)pc->data;
1647   PetscErrorCode ierr;
1648   PetscBool      match;
1649 
1650   PetscFunctionBegin;
1651   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1652   PetscValidIntPointer(n,2);
1653   if (is) PetscValidPointer(is,3);
1654   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1655   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1656   if (n) *n = osm->n_local_true;
1657   if (is) *is = osm->is;
1658   if (is_local) *is_local = osm->is_local;
1659   PetscFunctionReturn(0);
1660 }
1661 
1662 /*@C
1663     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1664     only) for the additive Schwarz preconditioner.
1665 
1666     Not Collective
1667 
1668     Input Parameter:
1669 .   pc - the preconditioner context
1670 
1671     Output Parameters:
1672 +   n - the number of matrices for this processor (default value = 1)
1673 -   mat - the matrices
1674 
1675     Level: advanced
1676 
1677     Notes:
1678     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks())
1679 
1680            Usually one would use PCSetModifySubMatrices() to change the submatrices in building the preconditioner.
1681 
1682 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1683           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1684 @*/
1685 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1686 {
1687   PC_ASM         *osm;
1688   PetscErrorCode ierr;
1689   PetscBool      match;
1690 
1691   PetscFunctionBegin;
1692   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1693   PetscValidIntPointer(n,2);
1694   if (mat) PetscValidPointer(mat,3);
1695   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1696   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1697   if (!match) {
1698     if (n) *n = 0;
1699     if (mat) *mat = NULL;
1700   } else {
1701     osm = (PC_ASM*)pc->data;
1702     if (n) *n = osm->n_local_true;
1703     if (mat) *mat = osm->pmat;
1704   }
1705   PetscFunctionReturn(0);
1706 }
1707 
1708 /*@
1709     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1710 
1711     Logically Collective
1712 
1713     Input Parameter:
1714 +   pc  - the preconditioner
1715 -   flg - boolean indicating whether to use subdomains defined by the DM
1716 
1717     Options Database Key:
1718 .   -pc_asm_dm_subdomains
1719 
1720     Level: intermediate
1721 
1722     Notes:
1723     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1724     so setting either of the first two effectively turns the latter off.
1725 
1726 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1727           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1728 @*/
1729 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1730 {
1731   PC_ASM         *osm = (PC_ASM*)pc->data;
1732   PetscErrorCode ierr;
1733   PetscBool      match;
1734 
1735   PetscFunctionBegin;
1736   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1737   PetscValidLogicalCollectiveBool(pc,flg,2);
1738   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1739   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1740   if (match) {
1741     osm->dm_subdomains = flg;
1742   }
1743   PetscFunctionReturn(0);
1744 }
1745 
1746 /*@
1747     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1748     Not Collective
1749 
1750     Input Parameter:
1751 .   pc  - the preconditioner
1752 
1753     Output Parameter:
1754 .   flg - boolean indicating whether to use subdomains defined by the DM
1755 
1756     Level: intermediate
1757 
1758 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1759           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1760 @*/
1761 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1762 {
1763   PC_ASM         *osm = (PC_ASM*)pc->data;
1764   PetscErrorCode ierr;
1765   PetscBool      match;
1766 
1767   PetscFunctionBegin;
1768   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1769   PetscValidBoolPointer(flg,2);
1770   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1771   if (match) {
1772     if (flg) *flg = osm->dm_subdomains;
1773   }
1774   PetscFunctionReturn(0);
1775 }
1776 
1777 /*@
1778      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1779 
1780    Not Collective
1781 
1782    Input Parameter:
1783 .  pc - the PC
1784 
1785    Output Parameter:
1786 .  -pc_asm_sub_mat_type - name of matrix type
1787 
1788    Level: advanced
1789 
1790 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1791 @*/
1792 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1793   PetscErrorCode ierr;
1794 
1795   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1796   PetscFunctionReturn(0);
1797 }
1798 
1799 /*@
1800      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1801 
1802    Collective on Mat
1803 
1804    Input Parameters:
1805 +  pc             - the PC object
1806 -  sub_mat_type   - matrix type
1807 
1808    Options Database Key:
1809 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1810 
1811    Notes:
1812    See "${PETSC_DIR}/include/petscmat.h" for available types
1813 
1814   Level: advanced
1815 
1816 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1817 @*/
1818 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1819 {
1820   PetscErrorCode ierr;
1821 
1822   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1823   PetscFunctionReturn(0);
1824 }
1825