xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 55e7fe800d976e85ed2b5cd8bfdef564daa37bd9)
1 /*
2   This file defines an additive Schwarz preconditioner for any Mat implementation.
3 
4   Note that each processor may have any number of subdomains. But in order to
5   deal easily with the VecScatter(), we treat each processor as if it has the
6   same number of subdomains.
7 
8        n - total number of true subdomains on all processors
9        n_local_true - actual number of subdomains on this processor
10        n_local = maximum over all processors of n_local_true
11 */
12 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
13 #include <petscdm.h>
14 
/* Private context for the additive Schwarz (ASM) preconditioner.
   See the comment at the top of this file for the relationship between
   n, n_local and n_local_true. */
typedef struct {
  PetscInt   n, n_local, n_local_true; /* global, padded-local, and actual-local subdomain counts */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter restriction;         /* mapping from global to overlapping (process) subdomain*/
  VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain */
  VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */
  Vec        lx, ly;              /* work vectors */
  Vec        *x,*y;               /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
} PC_ASM;
38 
/*
  PCView_ASM - Prints a summary of the ASM preconditioner (block counts,
  overlap, restriction/interpolation type) and the subdomain KSP solvers
  to an ASCII or string viewer.

  Collective on pc: every rank must enter the Get/RestoreSubViewer calls,
  even though only rank 0 prints in the same_local_solves branch.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* defaults are overwritten below once the corresponding value is known */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* all blocks share one solver configuration; view only block 0 on rank 0 */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* solvers differ per block; each rank prints its own blocks synchronously */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
98 
99 static PetscErrorCode PCASMPrintSubdomains(PC pc)
100 {
101   PC_ASM         *osm = (PC_ASM*)pc->data;
102   const char     *prefix;
103   char           fname[PETSC_MAX_PATH_LEN+1];
104   PetscViewer    viewer, sviewer;
105   char           *s;
106   PetscInt       i,j,nidx;
107   const PetscInt *idx;
108   PetscMPIInt    rank, size;
109   PetscErrorCode ierr;
110 
111   PetscFunctionBegin;
112   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
113   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
114   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
115   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
116   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
117   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
118   for (i=0; i<osm->n_local; i++) {
119     if (i < osm->n_local_true) {
120       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
121       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
122       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
123       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
124       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
125       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
126       for (j=0; j<nidx; j++) {
127         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
128       }
129       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
130       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
131       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
132       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
133       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
134       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
135       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
136       ierr = PetscFree(s);CHKERRQ(ierr);
137       if (osm->is_local) {
138         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
139         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
140         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
141         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
142         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
143         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
144         for (j=0; j<nidx; j++) {
145           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
146         }
147         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
148         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
149         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
150         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
151         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
152         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
153         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
154         ierr = PetscFree(s);CHKERRQ(ierr);
155       }
156     } else {
157       /* Participate in collective viewer calls. */
158       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
159       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
160       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
161       /* Assume either all ranks have is_local or none do. */
162       if (osm->is_local) {
163         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
164         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
165         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
166       }
167     }
168   }
169   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
170   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
/*
  PCSetUp_ASM - Builds the ASM data structures: subdomain index sets,
  submatrices, local work vectors, the global->local and local->block
  scatters, and one KSP per local subdomain.

  On the first call (!pc->setupcalled) everything is created; on later
  calls only the submatrices are re-extracted, from scratch when the
  nonzero pattern changed and in-place otherwise.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    /* NOTE(review): this inner m shadows the outer m declared above; both are
       assigned before use, so behavior is unaffected, but consider removing one. */
    PetscInt m;

    /* for a known-symmetric operator default to the symmetric (basic) variant,
       unless the user explicitly chose a type */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* record the non-overlapping sets before MatIncreaseOverlap() below modifies osm->is */
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        /* sub-solver options are selected with the "<pc prefix>sub_" prefix */
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }

    /* lis = union of all overlapping subdomains on this process; lx/ly are
       the matching local work vectors */
    ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
    ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
    ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);

    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      /* lprolongation is only needed for the restrict + additive combination */
      ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);

    /* global -> process-local (lis) scatter */
    ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
    ierr = VecScatterCreateWithData(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
    ierr = ISDestroy(&isl);CHKERRQ(ierr);


    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
      /* translate the global indices of is[i] into positions within lis */
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
      if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreateWithData(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isll);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overalapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);

        /* idx1: positions of is_local[i] within is[i] (source side of the scatter) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);

        /* idx2: positions of is_local[i] within lis (destination side of the scatter) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);

        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = VecScatterCreateWithData(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);

        ierr = ISDestroy(&isll);CHKERRQ(ierr);
        ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
      }
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] = rows is[i], columns lis: used to update later blocks' RHS
       with the current local solution in PCApply_ASM() */
    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
440 
441 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
442 {
443   PC_ASM             *osm = (PC_ASM*)pc->data;
444   PetscErrorCode     ierr;
445   PetscInt           i;
446   KSPConvergedReason reason;
447 
448   PetscFunctionBegin;
449   for (i=0; i<osm->n_local_true; i++) {
450     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
451     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
452     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
453       pc->failedreason = PC_SUBPC_ERROR;
454     }
455   }
456   PetscFunctionReturn(0);
457 }
458 
/*
  PCApply_ASM - Applies the additive (or locally multiplicative) Schwarz
  preconditioner: y = sum_i R_i^T A_i^{-1} R_i x, with the restriction or
  interpolation limited to local values for the PC_ASM_RESTRICT /
  PC_ASM_INTERPOLATE variants.

  Flow: scatter the global RHS x into the process-local work vector lx,
  restrict lx into each block RHS x[i], solve with the block KSP into y[i],
  accumulate into the process-local solution ly, and finally scatter-add ly
  back into the global y. For PC_COMPOSITE_MULTIPLICATIVE the (i+1)-block
  RHS is additionally updated with the current local solution via lmats.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){
    /* zero the global and the local solutions */
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else{ /* interpolate the overalapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* udpdate the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] -= lmats[i+1] * ly */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  }else{
    SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
530 
/*
  PCApplyTranspose_ASM - Applies the transpose of the ASM preconditioner.
  Same restrict / solve / interpolate structure as PCApply_ASM(), but using
  KSPSolveTranspose() and with the RESTRICT/INTERPOLATE scatter-mode roles
  swapped (see the note below). Unlike PCApply_ASM(), there is no
  multiplicative inter-block update here.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */

  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* zero the global and the local solutions */
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

  /* Copy the global RHS to local RHS including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

  /* Restrict local RHS to the overlapping 0-block RHS */
  ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

  /* do the local solves */
  for (i = 0; i < n_local_true; ++i) {

    /* solve the overlapping i-block */
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);CHKERRQ(ierr);

    if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution */
     ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
     ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
    }
    else{ /* interpolate the overalapping i-block solution to the local solution */
      ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
    }

    if (i < n_local_true-1) {
      /* Restrict local RHS to the overlapping (i+1)-block RHS */
      ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
    }
  }
  /* Add the local solution to the global solution including the ghost nodes */
  ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);

  PetscFunctionReturn(0);

}
596 
/*
   PCReset_ASM - Destroys everything built during PCSetUp_ASM() so the
   preconditioner can be set up again; the subdomain KSP objects are only
   reset here, they are destroyed in PCDestroy_ASM().
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* reset (do not destroy) each subdomain solver */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* free the extracted overlapping subdomain matrices */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* a non-NULL osm->lrestriction indicates the scatters and work vectors were created */
  if (osm->lrestriction) {
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      /* osm->lprolongation exists only for the restricted variants (see struct comment) */
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  /* local submatrix copies exist only for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
642 
/*
   PCDestroy_ASM - Frees all memory owned by the ASM preconditioner,
   including the subdomain KSP solvers.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* PCReset_ASM() only resets the sub-KSPs; destroy them (and their array) here */
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
660 
661 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
662 {
663   PC_ASM         *osm = (PC_ASM*)pc->data;
664   PetscErrorCode ierr;
665   PetscInt       blocks,ovl;
666   PetscBool      symset,flg;
667   PCASMType      asmtype;
668   PCCompositeType loctype;
669   char           sub_mat_type[256];
670 
671   PetscFunctionBegin;
672   /* set the type to symmetric if matrix is symmetric */
673   if (!osm->type_set && pc->pmat) {
674     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
675     if (symset && flg) osm->type = PC_ASM_BASIC;
676   }
677   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
678   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
679   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
680   if (flg) {
681     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
682     osm->dm_subdomains = PETSC_FALSE;
683   }
684   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
685   if (flg) {
686     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
687     osm->dm_subdomains = PETSC_FALSE;
688   }
689   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
690   if (flg) {
691     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
692     osm->dm_subdomains = PETSC_FALSE;
693   }
694   flg  = PETSC_FALSE;
695   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
696   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
697   flg  = PETSC_FALSE;
698   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
699   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
700   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
701   if(flg){
702     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
703   }
704   ierr = PetscOptionsTail();CHKERRQ(ierr);
705   PetscFunctionReturn(0);
706 }
707 
708 /*------------------------------------------------------------------------------------*/
709 
/*
   PCASMSetLocalSubdomains_ASM - Records the user-supplied overlapping (is)
   and/or non-overlapping (is_local) subdomain index sets for this process;
   must be called before PCSetUp().
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* take references before destroying the old sets: the caller may be passing
       the very index sets we already hold */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* no overlapping sets given: derive them from the non-overlapping ones */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* zero overlap: share the same IS, bumping its reference count */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
757 
758 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
759 {
760   PC_ASM         *osm = (PC_ASM*)pc->data;
761   PetscErrorCode ierr;
762   PetscMPIInt    rank,size;
763   PetscInt       n;
764 
765   PetscFunctionBegin;
766   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
767   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
768 
769   /*
770      Split the subdomains equally among all processors
771   */
772   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
773   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
774   n    = N/size + ((N % size) > rank);
775   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
776   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
777   if (!pc->setupcalled) {
778     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
779 
780     osm->n_local_true = n;
781     osm->is           = 0;
782     osm->is_local     = 0;
783   }
784   PetscFunctionReturn(0);
785 }
786 
/* Stores the requested overlap; it may only be changed before PCSetUp() has run */
static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  /* after setup, re-setting the same value is a silent no-op (a different value errored above) */
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}
797 
798 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
799 {
800   PC_ASM *osm = (PC_ASM*)pc->data;
801 
802   PetscFunctionBegin;
803   osm->type     = type;
804   osm->type_set = PETSC_TRUE;
805   PetscFunctionReturn(0);
806 }
807 
/* Returns the currently selected ASM restriction/interpolation variant */
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}
816 
/* Sets how the local subdomain solves are composed; only additive and
   multiplicative composition are implemented */
static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}
826 
/* Returns the local composition type (additive or multiplicative) */
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
835 
/* Records whether subdomain index sets should be sorted (consumed during setup) */
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
844 
/*
   PCASMGetSubKSP_ASM - Returns this process's block count, the global index
   of its first block (optional, collective), and the array of subdomain KSPs.
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* inclusive prefix sum of block counts; subtracting our own count gives the
       exclusive prefix sum, i.e. the global index of our first block */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
866 
/* Returns the matrix type used for subdomain solves; NULL when never set (see PCCreate_ASM) */
static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}
877 
/* Stores a private copy of the requested subdomain matrix type, freeing any previous value */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscFree() of a NULL pointer is a no-op, so this is safe on first call */
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
889 
890 /*@C
891     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
892 
893     Collective on PC
894 
895     Input Parameters:
896 +   pc - the preconditioner context
897 .   n - the number of subdomains for this processor (default value = 1)
898 .   is - the index set that defines the subdomains for this processor
899          (or NULL for PETSc to determine subdomains)
900 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
901          (or NULL to not provide these)
902 
903     Options Database Key:
904     To set the total number of subdomain blocks rather than specify the
905     index sets, use the option
906 .    -pc_asm_local_blocks <blks> - Sets local blocks
907 
908     Notes:
909     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
910 
911     By default the ASM preconditioner uses 1 block per processor.
912 
913     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
914 
915     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
916     back to form the global solution (this is the standard restricted additive Schwarz method)
917 
918     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
919     no code to handle that case.
920 
921     Level: advanced
922 
923 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
924 
925 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
926           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
927 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscTryMethod(): silently does nothing if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
937 
938 /*@C
939     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
940     additive Schwarz preconditioner.  Either all or no processors in the
941     PC communicator must call this routine, with the same index sets.
942 
943     Collective on PC
944 
945     Input Parameters:
946 +   pc - the preconditioner context
947 .   N  - the number of subdomains for all processors
948 .   is - the index sets that define the subdomains for all processors
949          (or NULL to ask PETSc to determine the subdomains)
950 -   is_local - the index sets that define the local part of the subdomains for this processor
951          (or NULL to not provide this information)
952 
953     Options Database Key:
954     To set the total number of subdomain blocks rather than specify the
955     index sets, use the option
956 .    -pc_asm_blocks <blks> - Sets total blocks
957 
958     Notes:
959     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
960 
961     By default the ASM preconditioner uses 1 block per processor.
962 
963     These index sets cannot be destroyed until after completion of the
964     linear solves for which the ASM preconditioner is being used.
965 
966     Use PCASMSetLocalSubdomains() to set local subdomains.
967 
968     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
969 
970     Level: advanced
971 
972 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
973 
974 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
975           PCASMCreateSubdomains2D()
976 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscTryMethod(): silently does nothing if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
986 
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast, but not scalable,
    use the option -mat_increase_overlap_scalable when the problem and number of processes is large.

    Note that one can define initial index sets with any overlap via
    PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* PetscTryMethod(): silently does nothing if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1039 
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes:
    if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
    to limit the local processor interpolation

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* PetscTryMethod(): silently does nothing if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1080 
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod(): errors if pc does not provide this method (i.e. is not PCASM) */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1119 
1120 /*@
1121   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1122 
1123   Logically Collective on PC
1124 
1125   Input Parameters:
1126 + pc  - the preconditioner context
1127 - type - type of composition, one of
1128 .vb
1129   PC_COMPOSITE_ADDITIVE       - local additive combination
1130   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1131 .ve
1132 
1133   Options Database Key:
1134 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1135 
1136   Level: intermediate
1137 
1138 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1139 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* PetscTryMethod(): silently does nothing if pc is not of type PCASM */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1150 
1151 /*@
1152   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1153 
1154   Logically Collective on PC
1155 
1156   Input Parameter:
1157 . pc  - the preconditioner context
1158 
1159   Output Parameter:
1160 . type - type of composition, one of
1161 .vb
1162   PC_COMPOSITE_ADDITIVE       - local additive combination
1163   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1164 .ve
1165 
1166   Options Database Key:
1167 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1168 
1169   Level: intermediate
1170 
1171 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1172 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* PetscUseMethod(): errors if pc does not provide this method (i.e. is not PCASM) */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1183 
1184 /*@
1185     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1186 
1187     Logically Collective on PC
1188 
1189     Input Parameters:
1190 +   pc  - the preconditioner context
1191 -   doSort - sort the subdomain indices
1192 
1193     Level: intermediate
1194 
1195 .keywords: PC, ASM, set, type
1196 
1197 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1198           PCASMCreateSubdomains2D()
1199 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* PetscTryMethod(): silently does nothing if pc is not of type PCASM */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1210 
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod(): errors if pc does not provide this method (i.e. is not PCASM) */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1250 
1251 /* -------------------------------------------------------------------------------------*/
1252 /*MC
1253    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1254            its own KSP object.
1255 
1256    Options Database Keys:
1257 +  -pc_asm_blocks <blks> - Sets total blocks
1258 .  -pc_asm_overlap <ovl> - Sets overlap
1259 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1260 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1261 
1262      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1263       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1264       -pc_asm_type basic to use the standard ASM.
1265 
1266    Notes:
1267     Each processor can have one or more blocks, but a block cannot be shared by more
1268      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1269 
1270      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1271         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1272 
1273      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1274          and set the options directly on the resulting KSP object (you can access its PC
1275          with KSPGetPC())
1276 
1277    Level: beginner
1278 
1279    Concepts: additive Schwarz method
1280 
1281     References:
1282 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1283      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1285     Cambridge University Press.
1286 
1287 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1288            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1289            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1290 
1291 M*/
1292 
/*
   PCCreate_ASM - Creates the PC_ASM context, installs the default parameter
   values and the PCASM implementations of the PC operations, and composes
   the type-specific PCASM API functions onto the object.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* defaults: block counts decided later (PETSC_DECIDE), overlap 1, restricted ASM,
     additive local composition, sorted subdomain indices */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->lprolongation     = 0;
  osm->lrestriction      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  /* install the PCASM implementations of the generic PC operations */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* compose the type-specific methods reached via PetscTryMethod()/PetscUseMethod() */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1346 
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* the local row range must fall on block boundaries, otherwise blocks would be split across processes */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* isbaij drives both the (S)BAIJ-aware MatGetRowIJ call and the bs scaling of the partition below */
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* CURRENT and SQUARE are trivial partitioners; only a "real" one justifies building the adjacency graph */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; MatCreateMPIAdj takes ownership of iia/jja */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;  /* ispart/isnumb are valid only when this is TRUE */
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* distribute mbs blocks over n subdomains, the first (mbs % n) subdomains get one extra block */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering: sort local nodes by their new global number
       so that nodes of partition k end up contiguous in indices[] */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case: expand each block index into bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1524 
1525 /*@C
1526    PCASMDestroySubdomains - Destroys the index sets created with
1527    PCASMCreateSubdomains(). Should be called after setting subdomains
1528    with PCASMSetLocalSubdomains().
1529 
1530    Collective
1531 
1532    Input Parameters:
1533 +  n - the number of index sets
1534 .  is - the array of index sets
1535 -  is_local - the array of local index sets, can be NULL
1536 
1537    Level: advanced
1538 
1539 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1540 
1541 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1542 @*/
1543 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1544 {
1545   PetscInt       i;
1546   PetscErrorCode ierr;
1547 
1548   PetscFunctionBegin;
1549   if (n <= 0) PetscFunctionReturn(0);
1550   if (is) {
1551     PetscValidPointer(is,2);
1552     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1553     ierr = PetscFree(is);CHKERRQ(ierr);
1554   }
1555   if (is_local) {
1556     PetscValidPointer(is_local,3);
1557     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1558     ierr = PetscFree(is_local);CHKERRQ(ierr);
1559   }
1560   PetscFunctionReturn(0);
1561 }
1562 
1563 /*@
1564    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1565    preconditioner for a two-dimensional problem on a regular grid.
1566 
1567    Not Collective
1568 
1569    Input Parameters:
1570 +  m, n - the number of mesh points in the x and y directions
1571 .  M, N - the number of subdomains in the x and y directions
1572 .  dof - degrees of freedom per node
1573 -  overlap - overlap in mesh lines
1574 
1575    Output Parameters:
1576 +  Nsub - the number of subdomains created
1577 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1578 -  is_local - array of index sets defining non-overlapping subdomains
1579 
1580    Note:
1581    Presently PCAMSCreateSubdomains2d() is valid only for sequential
1582    preconditioners.  More general related routines are
1583    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1584 
1585    Level: advanced
1586 
1587 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1588 
1589 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1590           PCASMSetOverlap()
1591 @*/
1592 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1593 {
1594   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1595   PetscErrorCode ierr;
1596   PetscInt       nidx,*idx,loc,ii,jj,count;
1597 
1598   PetscFunctionBegin;
1599   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1600 
1601   *Nsub     = N*M;
1602   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1603   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1604   ystart    = 0;
1605   loc_outer = 0;
1606   for (i=0; i<N; i++) {
1607     height = n/N + ((n % N) > i); /* height of subdomain */
1608     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1609     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1610     yright = ystart + height + overlap; if (yright > n) yright = n;
1611     xstart = 0;
1612     for (j=0; j<M; j++) {
1613       width = m/M + ((m % M) > j); /* width of subdomain */
1614       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1615       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1616       xright = xstart + width + overlap; if (xright > m) xright = m;
1617       nidx   = (xright - xleft)*(yright - yleft);
1618       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1619       loc    = 0;
1620       for (ii=yleft; ii<yright; ii++) {
1621         count = m*ii + xleft;
1622         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1623       }
1624       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1625       if (overlap == 0) {
1626         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1627 
1628         (*is_local)[loc_outer] = (*is)[loc_outer];
1629       } else {
1630         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1631           for (jj=xstart; jj<xstart+width; jj++) {
1632             idx[loc++] = m*ii + jj;
1633           }
1634         }
1635         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1636       }
1637       ierr    = PetscFree(idx);CHKERRQ(ierr);
1638       xstart += width;
1639       loc_outer++;
1640     }
1641     ystart += height;
1642   }
1643   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1644   PetscFunctionReturn(0);
1645 }
1646 
1647 /*@C
1648     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1649     only) for the additive Schwarz preconditioner.
1650 
1651     Not Collective
1652 
1653     Input Parameter:
1654 .   pc - the preconditioner context
1655 
1656     Output Parameters:
1657 +   n - the number of subdomains for this processor (default value = 1)
1658 .   is - the index sets that define the subdomains for this processor
1659 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1660 
1661 
1662     Notes:
1663     The IS numbering is in the parallel, global numbering of the vector.
1664 
1665     Level: advanced
1666 
1667 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1668 
1669 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1670           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1671 @*/
1672 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1673 {
1674   PC_ASM         *osm = (PC_ASM*)pc->data;
1675   PetscErrorCode ierr;
1676   PetscBool      match;
1677 
1678   PetscFunctionBegin;
1679   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1680   PetscValidIntPointer(n,2);
1681   if (is) PetscValidPointer(is,3);
1682   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1683   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1684   if (n) *n = osm->n_local_true;
1685   if (is) *is = osm->is;
1686   if (is_local) *is_local = osm->is_local;
1687   PetscFunctionReturn(0);
1688 }
1689 
1690 /*@C
1691     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1692     only) for the additive Schwarz preconditioner.
1693 
1694     Not Collective
1695 
1696     Input Parameter:
1697 .   pc - the preconditioner context
1698 
1699     Output Parameters:
1700 +   n - the number of matrices for this processor (default value = 1)
1701 -   mat - the matrices
1702 
1703     Level: advanced
1704 
1705     Notes:
1706     Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1707 
1708            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1709 
1710 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1711 
1712 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1713           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1714 @*/
1715 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1716 {
1717   PC_ASM         *osm;
1718   PetscErrorCode ierr;
1719   PetscBool      match;
1720 
1721   PetscFunctionBegin;
1722   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1723   PetscValidIntPointer(n,2);
1724   if (mat) PetscValidPointer(mat,3);
1725   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1726   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1727   if (!match) {
1728     if (n) *n = 0;
1729     if (mat) *mat = NULL;
1730   } else {
1731     osm = (PC_ASM*)pc->data;
1732     if (n) *n = osm->n_local_true;
1733     if (mat) *mat = osm->pmat;
1734   }
1735   PetscFunctionReturn(0);
1736 }
1737 
1738 /*@
1739     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1740 
1741     Logically Collective
1742 
1743     Input Parameter:
1744 +   pc  - the preconditioner
1745 -   flg - boolean indicating whether to use subdomains defined by the DM
1746 
1747     Options Database Key:
1748 .   -pc_asm_dm_subdomains
1749 
1750     Level: intermediate
1751 
1752     Notes:
1753     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1754     so setting either of the first two effectively turns the latter off.
1755 
1756 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1757 
1758 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1759           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1760 @*/
1761 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1762 {
1763   PC_ASM         *osm = (PC_ASM*)pc->data;
1764   PetscErrorCode ierr;
1765   PetscBool      match;
1766 
1767   PetscFunctionBegin;
1768   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1769   PetscValidLogicalCollectiveBool(pc,flg,2);
1770   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1771   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1772   if (match) {
1773     osm->dm_subdomains = flg;
1774   }
1775   PetscFunctionReturn(0);
1776 }
1777 
1778 /*@
1779     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1780     Not Collective
1781 
1782     Input Parameter:
1783 .   pc  - the preconditioner
1784 
1785     Output Parameter:
1786 .   flg - boolean indicating whether to use subdomains defined by the DM
1787 
1788     Level: intermediate
1789 
1790 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1791 
1792 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1793           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1794 @*/
1795 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1796 {
1797   PC_ASM         *osm = (PC_ASM*)pc->data;
1798   PetscErrorCode ierr;
1799   PetscBool      match;
1800 
1801   PetscFunctionBegin;
1802   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1803   PetscValidPointer(flg,2);
1804   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1805   if (match) {
1806     if (flg) *flg = osm->dm_subdomains;
1807   }
1808   PetscFunctionReturn(0);
1809 }
1810 
1811 /*@
1812      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1813 
1814    Not Collective
1815 
1816    Input Parameter:
1817 .  pc - the PC
1818 
1819    Output Parameter:
1820 .  -pc_asm_sub_mat_type - name of matrix type
1821 
1822    Level: advanced
1823 
1824 .keywords: PC, PCASM, MatType, set
1825 
1826 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1827 @*/
1828 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1829   PetscErrorCode ierr;
1830 
1831   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1832   PetscFunctionReturn(0);
1833 }
1834 
1835 /*@
1836      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1837 
1838    Collective on Mat
1839 
1840    Input Parameters:
1841 +  pc             - the PC object
1842 -  sub_mat_type   - matrix type
1843 
1844    Options Database Key:
1845 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1846 
1847    Notes:
1848    See "${PETSC_DIR}/include/petscmat.h" for available types
1849 
1850   Level: advanced
1851 
1852 .keywords: PC, PCASM, MatType, set
1853 
1854 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1855 @*/
1856 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1857 {
1858   PetscErrorCode ierr;
1859 
1860   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1861   PetscFunctionReturn(0);
1862 }
1863