xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision bfcb38ea38335faa6e7f8d97f6bc6ce9aa2a1dd1)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
13 #include <petscdm.h>
14 
/* Private context for the additive Schwarz (ASM) preconditioner. */
typedef struct {
  PetscInt   n, n_local, n_local_true; /* see file header: n = total subdomains over all ranks, n_local = max over ranks, n_local_true = subdomains on this rank */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter restriction;         /* mapping from global to overlapping (process) subdomain*/
  VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain */
  VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */
  Vec        lx, ly;              /* work vectors */
  Vec        *x,*y;               /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
/*
  PCView_ASM - Prints a human-readable description of the ASM preconditioner.

  Supports ASCII and string viewers. For ASCII output: if all local solvers are
  the same, only rank 0 prints the (single, representative) sub-KSP; otherwise
  every rank prints each of its local blocks via synchronized output. Note that
  PetscViewerGetSubViewer()/RestoreSubViewer() are collective, so every rank
  must reach them even when only rank 0 actually prints.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* default texts are used when the user has not yet set overlap or subdomains */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        /* only rank 0 prints the representative sub-KSP; all ranks still call Get/RestoreSubViewer (collective) */
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* each rank prints its own blocks; synchronized output keeps ranks' text in order */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* compact one-line summary for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
98 
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer, sviewer;
105   char           *s;
106   PetscInt       i,j,nidx;
107   const PetscInt *idx;
108   PetscMPIInt    rank, size;
109   PetscErrorCode ierr;
110 
111   PetscFunctionBegin;
112   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
113   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
114   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
115   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
116   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
117   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
118   for (i=0; i<osm->n_local; i++) {
119     if (i < osm->n_local_true) {
120       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
121       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
122       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
123 #define len  16*(nidx+1)+512
124       ierr = PetscMalloc1(len,&s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
126 #undef len
127       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
128       for (j=0; j<nidx; j++) {
129         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
130       }
131       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
132       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
133       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
136       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
137       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
138       ierr = PetscFree(s);CHKERRQ(ierr);
139       if (osm->is_local) {
140         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
141 #define len  16*(nidx+1)+512
142         ierr = PetscMalloc1(len, &s);CHKERRQ(ierr);
143         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);CHKERRQ(ierr);
144 #undef len
145         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
146         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
147         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
148         for (j=0; j<nidx; j++) {
149           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
150         }
151         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
152         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
153         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
156         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
157         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
158         ierr = PetscFree(s);CHKERRQ(ierr);
159       }
160     } else {
161       /* Participate in collective viewer calls. */
162       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
163       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
164       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
165       /* Assume either all ranks have is_local or none do. */
166       if (osm->is_local) {
167         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
168         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
169         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
170       }
171     }
172   }
173   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
174   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
175   PetscFunctionReturn(0);
176 }
177 
/*
  PCSetUp_ASM - Builds all data needed to apply the additive Schwarz preconditioner.

  On the first call (pc->setupcalled false) this:
    - decides the subdomains (user-provided, DM-provided, or one per rank),
    - computes the global/max subdomain counts with a single MAXSUM reduction,
    - optionally grows the overlap and sorts the index sets,
    - creates one sequential KSP per local subdomain,
    - builds the union index set 'lis' and the global->local scatter 'restriction',
      plus per-block scatters 'lrestriction[i]' (and 'lprolongation[i]' for the
      restricted-additive variant).
  On subsequent calls it only re-extracts the subdomain matrices (reusing them
  unless the nonzero pattern changed) and resets the sub-KSP operators.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m; /* NOTE(review): shadows the outer 'm'; both are only used as scratch for ISGetLocalSize() */

    /* for known-symmetric problems default to the symmetric (basic) ASM variant, unless the user chose a type */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took references; drop ours and free the temporary arrays */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    /* keep a copy of the non-overlapping subdomains before the overlap is grown below */
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        /* each block solver is sequential, defaults to preonly, and is configured with the "sub_" options prefix */
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }

    /* lis = sorted union of all local overlapping subdomains; lx/ly are work vectors on that union */
    ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
    ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
    ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);

    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    /* lprolongation is only needed for the restricted-additive variant */
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);

    /* 'restriction' scatters the global vector onto the union of local subdomains (lis -> 0..m-1) */
    ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
    ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
    ierr = ISDestroy(&isl);CHKERRQ(ierr);


    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
      /* translate the global indices of is[i] into positions within lis */
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
      if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isll);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);

        /* idx1: positions of the non-overlapping entries within the overlapping subdomain is[i] */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);

        /* idx2: positions of the same entries within the union lis */
        ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);

        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);

        ierr = ISDestroy(&isll);CHKERRQ(ierr);
        ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
      }
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] holds rows is[i] by columns lis; used to update later blocks' RHS in PCApply_ASM() */
    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
444 
445 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
446 {
447   PC_ASM             *osm = (PC_ASM*)pc->data;
448   PetscErrorCode     ierr;
449   PetscInt           i;
450   KSPConvergedReason reason;
451 
452   PetscFunctionBegin;
453   for (i=0; i<osm->n_local_true; i++) {
454     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
455     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
456     if (reason == KSP_DIVERGED_PC_FAILED) {
457       pc->failedreason = PC_SUBPC_ERROR;
458     }
459   }
460   PetscFunctionReturn(0);
461 }
462 
/*
  PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x.

  The global RHS is scattered onto the union of local subdomains (lx), each
  block is solved with its own KSP, the block solutions are accumulated into
  ly, and ly is scattered back (added) into the global solution y.

  PC_ASM_RESTRICT drops off-process values on the forward (restriction)
  scatter; PC_ASM_INTERPOLATE drops them on the reverse (interpolation)
  scatter. For PC_COMPOSITE_MULTIPLICATIVE, each subsequent block's RHS is
  corrected with the current accumulated solution via lmats, so block order
  matters — do not reorder these statements.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){
    /* zero the global and the local solutions */
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else{ /* interpolate the overlapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* update the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly (Gauss-Seidel-like sweep over the blocks) */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  }else{
    SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
535 
/*
  PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner:
  y = M^{-T} x.

  Mirrors PCApply_ASM() but uses KSPSolveTranspose() for the block solves,
  and swaps the RESTRICT/INTERPOLATE roles of the forward/reverse scatters
  (since the three factors are transposed).

  NOTE(review): unlike PCApply_ASM(), no multiplicative correction with
  osm->lmats is performed here, so loctype is effectively treated as
  additive — confirm whether the transpose of the multiplicative variant
  is intended to be supported.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block (transposed system) */
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
     ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
     ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    }
    else{ /* interpolate the overlapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
602 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* Reset (but do not destroy) the per-block solvers so the PC can be set up again */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* Free the submatrices that were extracted for the local solves */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* osm->lrestriction being non-NULL indicates the scatters and work vectors were built */
  if (osm->lrestriction) {
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      /* lprolongation exists only for the restricted variant (see struct comment at top of file) */
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  /* local matrix copies are kept only for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
648 
649 static PetscErrorCode PCDestroy_ASM(PC pc)
650 {
651   PC_ASM         *osm = (PC_ASM*)pc->data;
652   PetscErrorCode ierr;
653   PetscInt       i;
654 
655   PetscFunctionBegin;
656   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
657   if (osm->ksp) {
658     for (i=0; i<osm->n_local_true; i++) {
659       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
660     }
661     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
662   }
663   ierr = PetscFree(pc->data);CHKERRQ(ierr);
664   PetscFunctionReturn(0);
665 }
666 
667 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
668 {
669   PC_ASM         *osm = (PC_ASM*)pc->data;
670   PetscErrorCode ierr;
671   PetscInt       blocks,ovl;
672   PetscBool      symset,flg;
673   PCASMType      asmtype;
674   PCCompositeType loctype;
675   char           sub_mat_type[256];
676 
677   PetscFunctionBegin;
678   /* set the type to symmetric if matrix is symmetric */
679   if (!osm->type_set && pc->pmat) {
680     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
681     if (symset && flg) osm->type = PC_ASM_BASIC;
682   }
683   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
684   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
685   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
686   if (flg) {
687     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
688     osm->dm_subdomains = PETSC_FALSE;
689   }
690   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
691   if (flg) {
692     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
693     osm->dm_subdomains = PETSC_FALSE;
694   }
695   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
696   if (flg) {
697     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
698     osm->dm_subdomains = PETSC_FALSE;
699   }
700   flg  = PETSC_FALSE;
701   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
702   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
703   flg  = PETSC_FALSE;
704   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
705   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
706   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
707   if(flg){
708     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
709   }
710   ierr = PetscOptionsTail();CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 /*------------------------------------------------------------------------------------*/
715 
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the old ones, in
       case the caller passed (some of) the same objects back in */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* Only non-overlapping sets were supplied: derive the overlapping ones */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* no extra overlap will be added, so the two sets can share one object */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
763 
764 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
765 {
766   PC_ASM         *osm = (PC_ASM*)pc->data;
767   PetscErrorCode ierr;
768   PetscMPIInt    rank,size;
769   PetscInt       n;
770 
771   PetscFunctionBegin;
772   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
773   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
774 
775   /*
776      Split the subdomains equally among all processors
777   */
778   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
779   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
780   n    = N/size + ((N % size) > rank);
781   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
782   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
783   if (!pc->setupcalled) {
784     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
785 
786     osm->n_local_true = n;
787     osm->is           = 0;
788     osm->is_local     = 0;
789   }
790   PetscFunctionReturn(0);
791 }
792 
793 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
794 {
795   PC_ASM *osm = (PC_ASM*)pc->data;
796 
797   PetscFunctionBegin;
798   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
799   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
800   if (!pc->setupcalled) osm->overlap = ovl;
801   PetscFunctionReturn(0);
802 }
803 
804 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
805 {
806   PC_ASM *osm = (PC_ASM*)pc->data;
807 
808   PetscFunctionBegin;
809   osm->type     = type;
810   osm->type_set = PETSC_TRUE;
811   PetscFunctionReturn(0);
812 }
813 
814 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
815 {
816   PC_ASM *osm = (PC_ASM*)pc->data;
817 
818   PetscFunctionBegin;
819   *type = osm->type;
820   PetscFunctionReturn(0);
821 }
822 
823 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
824 {
825   PC_ASM *osm = (PC_ASM *) pc->data;
826 
827   PetscFunctionBegin;
828   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
829   osm->loctype = type;
830   PetscFunctionReturn(0);
831 }
832 
833 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
834 {
835   PC_ASM *osm = (PC_ASM *) pc->data;
836 
837   PetscFunctionBegin;
838   *type = osm->loctype;
839   PetscFunctionReturn(0);
840 }
841 
842 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
843 {
844   PC_ASM *osm = (PC_ASM*)pc->data;
845 
846   PetscFunctionBegin;
847   osm->sort_indices = doSort;
848   PetscFunctionReturn(0);
849 }
850 
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* MPI_Scan is collective, so every rank must request first_local or none may */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    /* convert the inclusive prefix sum into the global index of this rank's first block */
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
872 
873 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
874 {
875   PC_ASM         *osm = (PC_ASM*)pc->data;
876 
877   PetscFunctionBegin;
878   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
879   PetscValidPointer(sub_mat_type,2);
880   *sub_mat_type = osm->sub_mat_type;
881   PetscFunctionReturn(0);
882 }
883 
884 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
885 {
886   PetscErrorCode    ierr;
887   PC_ASM            *osm = (PC_ASM*)pc->data;
888 
889   PetscFunctionBegin;
890   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
891   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
892   ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
893   PetscFunctionReturn(0);
894 }
895 
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_local_blocks <blks> - Sets local blocks

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method)

    If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
    no code to handle that case.

    Level: advanced

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
941 
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the arguments is or is_local.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
988 
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast, but not scalable,
    use the option -mat_increase_overlap_scalable when the problem and number of processes is large.

    Note that one can define initial index sets with any overlap via
    PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1039 
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes:
    if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
    to limit the local processor interpolation

    Level: intermediate

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1078 
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1115 
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCCompositeType
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1146 
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCCompositeType
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1179 
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Notes:
    By default the subdomain indices are sorted (doSort = PETSC_TRUE).

    Level: intermediate

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1204 
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.

   Level: advanced

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1242 
1243 /* -------------------------------------------------------------------------------------*/
1244 /*MC
1245    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1246            its own KSP object.
1247 
1248    Options Database Keys:
1249 +  -pc_asm_blocks <blks> - Sets total blocks
1250 .  -pc_asm_overlap <ovl> - Sets overlap
1251 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1252 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1253 
1254      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1255       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1256       -pc_asm_type basic to use the standard ASM.
1257 
1258    Notes:
1259     Each processor can have one or more blocks, but a block cannot be shared by more
1260      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1261 
1262      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1263         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1264 
1265      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1266          and set the options directly on the resulting KSP object (you can access its PC
1267          with KSPGetPC())
1268 
1269    Level: beginner
1270 
1271    Concepts: additive Schwarz method
1272 
1273     References:
1274 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1275      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1277     Cambridge University Press.
1278 
1279 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1280            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1281            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1282 
1283 M*/
1284 
1285 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1286 {
1287   PetscErrorCode ierr;
1288   PC_ASM         *osm;
1289 
1290   PetscFunctionBegin;
1291   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1292 
1293   osm->n                 = PETSC_DECIDE;
1294   osm->n_local           = 0;
1295   osm->n_local_true      = PETSC_DECIDE;
1296   osm->overlap           = 1;
1297   osm->ksp               = 0;
1298   osm->restriction       = 0;
1299   osm->lprolongation     = 0;
1300   osm->lrestriction      = 0;
1301   osm->x                 = 0;
1302   osm->y                 = 0;
1303   osm->is                = 0;
1304   osm->is_local          = 0;
1305   osm->mat               = 0;
1306   osm->pmat              = 0;
1307   osm->type              = PC_ASM_RESTRICT;
1308   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1309   osm->same_local_solves = PETSC_TRUE;
1310   osm->sort_indices      = PETSC_TRUE;
1311   osm->dm_subdomains     = PETSC_FALSE;
1312   osm->sub_mat_type      = NULL;
1313 
1314   pc->data                 = (void*)osm;
1315   pc->ops->apply           = PCApply_ASM;
1316   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1317   pc->ops->setup           = PCSetUp_ASM;
1318   pc->ops->reset           = PCReset_ASM;
1319   pc->ops->destroy         = PCDestroy_ASM;
1320   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1321   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1322   pc->ops->view            = PCView_ASM;
1323   pc->ops->applyrichardson = 0;
1324 
1325   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1326   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1327   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1328   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1329   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1330   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1331   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1332   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1333   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1334   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1335   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1336   PetscFunctionReturn(0);
1337 }
1338 
1339 /*@C
1340    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1341    preconditioner for a any problem on a general grid.
1342 
1343    Collective
1344 
1345    Input Parameters:
1346 +  A - The global matrix operator
1347 -  n - the number of local blocks
1348 
1349    Output Parameters:
1350 .  outis - the array of index sets defining the subdomains
1351 
1352    Level: advanced
1353 
1354    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1355     from these if you use PCASMSetLocalSubdomains()
1356 
1357     In the Fortran version you must provide the array outis[] already allocated of length n.
1358 
1359 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1360 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* The local row range must be aligned to the block size, otherwise blocks would be split across processes */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  /* isbaij records whether the diagonal block stores block (b)aij structure; it changes
     both the RowIJ compression below and the bs scaling of the partition results */
  if (Ad) {
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* "current" and "square" partitioners are trivial; only a real graph partitioner is worth using */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE(review): iia/jja are not freed here -- presumably MatCreateMPIAdj() takes
           ownership and releases them when adj is destroyed; confirm against its docs */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* Array of n index sets returned to the caller; freed via PCASMDestroySubdomains() */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* spread mbs blocks over n subdomains; the first (mbs % n) subdomains get one extra block */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    /* after the sort, indices[] groups together the nodes of each partition, in order */
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      /* each subdomain takes the next count[i] entries of the grouped index array */
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1514 
1515 /*@C
1516    PCASMDestroySubdomains - Destroys the index sets created with
1517    PCASMCreateSubdomains(). Should be called after setting subdomains
1518    with PCASMSetLocalSubdomains().
1519 
1520    Collective
1521 
1522    Input Parameters:
1523 +  n - the number of index sets
1524 .  is - the array of index sets
1525 -  is_local - the array of local index sets, can be NULL
1526 
1527    Level: advanced
1528 
1529 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1530 @*/
1531 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1532 {
1533   PetscInt       i;
1534   PetscErrorCode ierr;
1535 
1536   PetscFunctionBegin;
1537   if (n <= 0) PetscFunctionReturn(0);
1538   if (is) {
1539     PetscValidPointer(is,2);
1540     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1541     ierr = PetscFree(is);CHKERRQ(ierr);
1542   }
1543   if (is_local) {
1544     PetscValidPointer(is_local,3);
1545     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1546     ierr = PetscFree(is_local);CHKERRQ(ierr);
1547   }
1548   PetscFunctionReturn(0);
1549 }
1550 
1551 /*@
1552    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1553    preconditioner for a two-dimensional problem on a regular grid.
1554 
1555    Not Collective
1556 
1557    Input Parameters:
1558 +  m, n - the number of mesh points in the x and y directions
1559 .  M, N - the number of subdomains in the x and y directions
1560 .  dof - degrees of freedom per node
1561 -  overlap - overlap in mesh lines
1562 
1563    Output Parameters:
1564 +  Nsub - the number of subdomains created
1565 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1566 -  is_local - array of index sets defining non-overlapping subdomains
1567 
1568    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1570    preconditioners.  More general related routines are
1571    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1572 
1573    Level: advanced
1574 
1575 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1576           PCASMSetOverlap()
1577 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* Only one degree of freedom per node is supported */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0; /* running index of the subdomain being built, row-major over the M x N grid of subdomains */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend the subdomain by 'overlap' mesh lines in y, clamped to the mesh */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* extend the subdomain by 'overlap' mesh lines in x, clamped to the mesh */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* collect the natural-ordering indices (m*row + col) of the overlapped rectangle */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* with no overlap the local subdomain equals the overlapping one; share the IS (extra reference) */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* rebuild idx (reusing the buffer) with only the non-overlapped rectangle */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1632 
1633 /*@C
1634     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1635     only) for the additive Schwarz preconditioner.
1636 
1637     Not Collective
1638 
1639     Input Parameter:
1640 .   pc - the preconditioner context
1641 
1642     Output Parameters:
1643 +   n - the number of subdomains for this processor (default value = 1)
1644 .   is - the index sets that define the subdomains for this processor
1645 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1646 
1647 
1648     Notes:
1649     The IS numbering is in the parallel, global numbering of the vector.
1650 
1651     Level: advanced
1652 
1653 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1654           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1655 @*/
1656 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1657 {
1658   PC_ASM         *osm = (PC_ASM*)pc->data;
1659   PetscErrorCode ierr;
1660   PetscBool      match;
1661 
1662   PetscFunctionBegin;
1663   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1664   PetscValidIntPointer(n,2);
1665   if (is) PetscValidPointer(is,3);
1666   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1667   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1668   if (n) *n = osm->n_local_true;
1669   if (is) *is = osm->is;
1670   if (is_local) *is_local = osm->is_local;
1671   PetscFunctionReturn(0);
1672 }
1673 
1674 /*@C
1675     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1676     only) for the additive Schwarz preconditioner.
1677 
1678     Not Collective
1679 
1680     Input Parameter:
1681 .   pc - the preconditioner context
1682 
1683     Output Parameters:
1684 +   n - the number of matrices for this processor (default value = 1)
1685 -   mat - the matrices
1686 
1687     Level: advanced
1688 
1689     Notes:
1690     Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1691 
1692            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1693 
1694 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1695           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1696 @*/
1697 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1698 {
1699   PC_ASM         *osm;
1700   PetscErrorCode ierr;
1701   PetscBool      match;
1702 
1703   PetscFunctionBegin;
1704   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1705   PetscValidIntPointer(n,2);
1706   if (mat) PetscValidPointer(mat,3);
1707   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1708   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1709   if (!match) {
1710     if (n) *n = 0;
1711     if (mat) *mat = NULL;
1712   } else {
1713     osm = (PC_ASM*)pc->data;
1714     if (n) *n = osm->n_local_true;
1715     if (mat) *mat = osm->pmat;
1716   }
1717   PetscFunctionReturn(0);
1718 }
1719 
1720 /*@
1721     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1722 
1723     Logically Collective
1724 
1725     Input Parameter:
1726 +   pc  - the preconditioner
1727 -   flg - boolean indicating whether to use subdomains defined by the DM
1728 
1729     Options Database Key:
.   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1731 
1732     Level: intermediate
1733 
1734     Notes:
1735     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1736     so setting either of the first two effectively turns the latter off.
1737 
1738 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1739           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1740 @*/
1741 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1742 {
1743   PC_ASM         *osm = (PC_ASM*)pc->data;
1744   PetscErrorCode ierr;
1745   PetscBool      match;
1746 
1747   PetscFunctionBegin;
1748   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1749   PetscValidLogicalCollectiveBool(pc,flg,2);
1750   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1751   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1752   if (match) {
1753     osm->dm_subdomains = flg;
1754   }
1755   PetscFunctionReturn(0);
1756 }
1757 
1758 /*@
1759     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1760     Not Collective
1761 
1762     Input Parameter:
1763 .   pc  - the preconditioner
1764 
1765     Output Parameter:
1766 .   flg - boolean indicating whether to use subdomains defined by the DM
1767 
1768     Level: intermediate
1769 
1770 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1771           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1772 @*/
1773 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1774 {
1775   PC_ASM         *osm = (PC_ASM*)pc->data;
1776   PetscErrorCode ierr;
1777   PetscBool      match;
1778 
1779   PetscFunctionBegin;
1780   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1781   PetscValidPointer(flg,2);
1782   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1783   if (match) {
1784     if (flg) *flg = osm->dm_subdomains;
1785   }
1786   PetscFunctionReturn(0);
1787 }
1788 
1789 /*@
1790      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1791 
1792    Not Collective
1793 
1794    Input Parameter:
1795 .  pc - the PC
1796 
1797    Output Parameter:
.  sub_mat_type - name of matrix type
1799 
1800    Level: advanced
1801 
1802 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1803 @*/
1804 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1805   PetscErrorCode ierr;
1806 
1807   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1808   PetscFunctionReturn(0);
1809 }
1810 
1811 /*@
1812      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1813 
1814    Collective on Mat
1815 
1816    Input Parameters:
1817 +  pc             - the PC object
1818 -  sub_mat_type   - matrix type
1819 
1820    Options Database Key:
1821 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1822 
1823    Notes:
1824    See "${PETSC_DIR}/include/petscmat.h" for available types
1825 
1826   Level: advanced
1827 
1828 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1829 @*/
1830 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1831 {
1832   PetscErrorCode ierr;
1833 
1834   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1835   PetscFunctionReturn(0);
1836 }
1837