xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision b41ce5d507ea9a58bfa83cf403107a702e77a67d)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
typedef struct {
  PetscInt   n, n_local, n_local_true; /* n: total subdomains over all ranks; n_local: max over ranks of n_local_true; n_local_true: subdomains on this rank */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; used for restrict version of algorithms */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors: subdomain rhs, subdomain solution, non-overlapping part of solution */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves (additive or multiplicative) */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors sized by lis */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
39 
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  /* Print a human-readable summary of the ASM preconditioner (subdomain counts,
     overlap, composition type) plus the subdomain KSP solvers. Supports ASCII and
     string viewers; collective over the PC's communicator. */
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* default strings are used when the user has not yet set overlap / block counts */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share the same solver configuration: print only the first KSP, on rank 0.
         GetSubViewer/RestoreSubViewer are collective, so all ranks must call them. */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers differ per block: every rank prints its own block information in rank order. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line form for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 static PetscErrorCode PCASMPrintSubdomains(PC pc)
101 {
102   PC_ASM         *osm = (PC_ASM*)pc->data;
103   const char     *prefix;
104   char           fname[PETSC_MAX_PATH_LEN+1];
105   PetscViewer    viewer, sviewer;
106   char           *s;
107   PetscInt       i,j,nidx;
108   const PetscInt *idx;
109   PetscMPIInt    rank, size;
110   PetscErrorCode ierr;
111 
112   PetscFunctionBegin;
113   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
114   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
115   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
116   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
117   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
118   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
119   for (i=0; i<osm->n_local; i++) {
120     if (i < osm->n_local_true) {
121       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
123       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
124       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
126       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
127       for (j=0; j<nidx; j++) {
128         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
129       }
130       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
131       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
132       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
133       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
135       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
137       ierr = PetscFree(s);CHKERRQ(ierr);
138       if (osm->is_local) {
139         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
140         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
141         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
142         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
143         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
144         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
145         for (j=0; j<nidx; j++) {
146           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
147         }
148         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
149         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
150         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
151         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
152         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
153         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscFree(s);CHKERRQ(ierr);
156       }
157     } else {
158       /* Participate in collective viewer calls. */
159       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
160       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
161       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
162       /* Assume either all ranks have is_local or none do. */
163       if (osm->is_local) {
164         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
165         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
166         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
167       }
168     }
169   }
170   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
171   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
172   PetscFunctionReturn(0);
173 }
174 
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  /* Build everything the ASM preconditioner needs before the first apply:
     - decide the subdomain index sets (from the user, the DM, or one-per-rank default),
     - grow them by the requested overlap,
     - extract the subdomain matrices from pc->pmat,
     - create one sequential KSP per local subdomain,
     - create the work vectors and the restriction/localization/prolongation scatters.
     On subsequent calls (pc->setupcalled) only the submatrices are refreshed,
     reusing the existing data structures unless the nonzero pattern changed. */
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* For a known-symmetric operator, the symmetric variant PC_ASM_BASIC is used
       unless the user explicitly chose a type. */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* Snapshot the current (not-yet-overlapped) index sets as the non-overlapping ones */
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        /* each subdomain solve is sequential, hence PETSC_COMM_SELF */
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      /* lis is the union of all local subdomains; lx/ly are the matching work vectors
         used to propagate updates between successive subdomain solves */
      PetscInt m;

      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* Map is_local into the local numbering of is to build the scatter that
           extracts only the non-overlapping part of the subdomain solution */
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* no localization: solution vector and prolongation scatter are shared
           with y/restriction via reference counting */
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* ranks with fewer than n_local subdomains get empty vectors/scatters so that
       the collective scatter calls in PCApply_ASM() stay matched across ranks */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* extract the rectangular couplings (rows = each subdomain, cols = union of
       subdomains) used to update the rhs between successive subdomain solves */
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
431 
432 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
433 {
434   PC_ASM             *osm = (PC_ASM*)pc->data;
435   PetscErrorCode     ierr;
436   PetscInt           i;
437   KSPConvergedReason reason;
438 
439   PetscFunctionBegin;
440   for (i=0; i<osm->n_local_true; i++) {
441     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
442     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
443     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
444       pc->failedreason = PC_SUBPC_ERROR;
445     }
446   }
447   PetscFunctionReturn(0);
448 }
449 
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  /* Apply the ASM preconditioner: restrict x to each subdomain, solve the
     subdomain systems, and add the (possibly localized) solutions back into y.
     Local solves are composed either additively (independent, scatters pipelined)
     or multiplicatively (each solve sees the updates of the previous ones). */
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    /* start all restrictions first so communication overlaps with the solves below */
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* keep only the non-overlapping part of the subdomain solution */
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs */
        /* osm->lx carries the correction from the previous subdomain solve (set at the
           bottom of this loop); restrict it into this subdomain's rhs first ... */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      /* ... then add the restriction of the original rhs x */
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* Compute the rhs update for the next subdomain: lx = restriction of
           -A_{i+1,union} * (this subdomain's contribution) */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
539 
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  /* Apply the transpose of the (additive) ASM preconditioner: the same
     restrict/solve/prolong structure as PCApply_ASM, with KSPSolveTranspose
     for the local solves and the restrict/interpolate roles exchanged. */
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* start all restrictions first so communication overlaps with the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* keep only the non-overlapping part of the subdomain solution */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
588 
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* Release everything built at setup time so the PC can be set up again.
     The KSP contexts are only reset here; they are destroyed in PCDestroy_ASM(). */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* The scatters and the work vectors come in parallel arrays of length n_local
     (osm->localization may be NULL); destroy the entries, then free the arrays */
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  /* These extra objects exist only for the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
636 
637 static PetscErrorCode PCDestroy_ASM(PC pc)
638 {
639   PC_ASM         *osm = (PC_ASM*)pc->data;
640   PetscErrorCode ierr;
641   PetscInt       i;
642 
643   PetscFunctionBegin;
644   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
645   if (osm->ksp) {
646     for (i=0; i<osm->n_local_true; i++) {
647       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
648     }
649     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
650   }
651   ierr = PetscFree(pc->data);CHKERRQ(ierr);
652   PetscFunctionReturn(0);
653 }
654 
655 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
656 {
657   PC_ASM         *osm = (PC_ASM*)pc->data;
658   PetscErrorCode ierr;
659   PetscInt       blocks,ovl;
660   PetscBool      symset,flg;
661   PCASMType      asmtype;
662   PCCompositeType loctype;
663   char           sub_mat_type[256];
664 
665   PetscFunctionBegin;
666   /* set the type to symmetric if matrix is symmetric */
667   if (!osm->type_set && pc->pmat) {
668     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
669     if (symset && flg) osm->type = PC_ASM_BASIC;
670   }
671   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
672   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
673   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
674   if (flg) {
675     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
676     osm->dm_subdomains = PETSC_FALSE;
677   }
678   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
679   if (flg) {
680     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
681     osm->dm_subdomains = PETSC_FALSE;
682   }
683   flg  = PETSC_FALSE;
684   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
685   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
686   flg  = PETSC_FALSE;
687   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
688   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
689   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
690   if(flg){
691     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
692   }
693   ierr = PetscOptionsTail();CHKERRQ(ierr);
694   PetscFunctionReturn(0);
695 }
696 
697 /*------------------------------------------------------------------------------------*/
698 
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* Reference the incoming index sets BEFORE destroying the stored ones, so the
       call remains safe when the caller passes the very sets already held by this PC */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* Only non-overlapping subdomains supplied: derive the overlapping ones from them */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* zero overlap: the two sets are identical, so share one IS via a reference */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
746 
747 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
748 {
749   PC_ASM         *osm = (PC_ASM*)pc->data;
750   PetscErrorCode ierr;
751   PetscMPIInt    rank,size;
752   PetscInt       n;
753 
754   PetscFunctionBegin;
755   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
756   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
757 
758   /*
759      Split the subdomains equally among all processors
760   */
761   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
762   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
763   n    = N/size + ((N % size) > rank);
764   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
765   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
766   if (!pc->setupcalled) {
767     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
768 
769     osm->n_local_true = n;
770     osm->is           = 0;
771     osm->is_local     = 0;
772   }
773   PetscFunctionReturn(0);
774 }
775 
776 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
777 {
778   PC_ASM *osm = (PC_ASM*)pc->data;
779 
780   PetscFunctionBegin;
781   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
782   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
783   if (!pc->setupcalled) osm->overlap = ovl;
784   PetscFunctionReturn(0);
785 }
786 
787 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
788 {
789   PC_ASM *osm = (PC_ASM*)pc->data;
790 
791   PetscFunctionBegin;
792   osm->type     = type;
793   osm->type_set = PETSC_TRUE;
794   PetscFunctionReturn(0);
795 }
796 
797 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
798 {
799   PC_ASM *osm = (PC_ASM*)pc->data;
800 
801   PetscFunctionBegin;
802   *type = osm->type;
803   PetscFunctionReturn(0);
804 }
805 
806 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
807 {
808   PC_ASM *osm = (PC_ASM *) pc->data;
809 
810   PetscFunctionBegin;
811   osm->loctype = type;
812   PetscFunctionReturn(0);
813 }
814 
815 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
816 {
817   PC_ASM *osm = (PC_ASM *) pc->data;
818 
819   PetscFunctionBegin;
820   *type = osm->loctype;
821   PetscFunctionReturn(0);
822 }
823 
824 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
825 {
826   PC_ASM *osm = (PC_ASM*)pc->data;
827 
828   PetscFunctionBegin;
829   osm->sort_indices = doSort;
830   PetscFunctionReturn(0);
831 }
832 
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* MPI_Scan() is collective: all ranks must request first_local or none may.
       The inclusive prefix sum minus our own count gives the global index of this
       process' first block. */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
854 
855 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
856 {
857   PC_ASM         *osm = (PC_ASM*)pc->data;
858 
859   PetscFunctionBegin;
860   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
861   PetscValidPointer(sub_mat_type,2);
862   *sub_mat_type = osm->sub_mat_type;
863   PetscFunctionReturn(0);
864 }
865 
866 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
867 {
868   PetscErrorCode    ierr;
869   PC_ASM            *osm = (PC_ASM*)pc->data;
870 
871   PetscFunctionBegin;
872   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
873   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
874   ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
875   PetscFunctionReturn(0);
876 }
877 
878 /*@C
879     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
880 
881     Collective on PC
882 
883     Input Parameters:
884 +   pc - the preconditioner context
885 .   n - the number of subdomains for this processor (default value = 1)
886 .   is - the index set that defines the subdomains for this processor
887          (or NULL for PETSc to determine subdomains)
888 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
889          (or NULL to not provide these)
890 
891     Notes:
892     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
893 
894     By default the ASM preconditioner uses 1 block per processor.
895 
896     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
897 
898     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
899     back to form the global solution (this is the standard restricted additive Schwarz method)
900 
901     If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
902     no code to handle that case.
903 
904     Level: advanced
905 
906 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
907 
908 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
909           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
910 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Forward to the type-specific implementation; a no-op when pc is not PCASM (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
920 
921 /*@C
922     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
923     additive Schwarz preconditioner.  Either all or no processors in the
924     PC communicator must call this routine, with the same index sets.
925 
926     Collective on PC
927 
928     Input Parameters:
929 +   pc - the preconditioner context
930 .   N  - the number of subdomains for all processors
931 .   is - the index sets that define the subdomains for all processors
932          (or NULL to ask PETSc to come up with subdomains)
933 -   is_local - the index sets that define the local part of the subdomains for this processor
934          (or NULL to not provide this information)
935 
936     Options Database Key:
937     To set the total number of subdomain blocks rather than specify the
938     index sets, use the option
939 .    -pc_asm_blocks <blks> - Sets total blocks
940 
941     Notes:
942     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
943 
944     By default the ASM preconditioner uses 1 block per processor.
945 
946     These index sets cannot be destroyed until after completion of the
947     linear solves for which the ASM preconditioner is being used.
948 
949     Use PCASMSetLocalSubdomains() to set local subdomains.
950 
951     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
952 
953     Level: advanced
954 
955 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
956 
957 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
958           PCASMCreateSubdomains2D()
959 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Forward to the type-specific implementation; a no-op when pc is not PCASM (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
969 
970 /*@
971     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
972     additive Schwarz preconditioner.  Either all or no processors in the
973     PC communicator must call this routine.
974 
975     Logically Collective on PC
976 
977     Input Parameters:
978 +   pc  - the preconditioner context
979 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
980 
981     Options Database Key:
982 .   -pc_asm_overlap <ovl> - Sets overlap
983 
984     Notes:
985     By default the ASM preconditioner uses 1 block per processor.  To use
986     multiple blocks per processor, see PCASMSetTotalSubdomains() and
987     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
988 
989     The overlap defaults to 1, so if one desires that no additional
990     overlap be computed beyond what may have been set with a call to
991     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
992     must be set to be 0.  In particular, if one does not explicitly set
993     the subdomains in an application code, then all overlap would be computed
994     internally by PETSc, and using an overlap of 0 would result in an ASM
995     variant that is equivalent to the block Jacobi preconditioner.
996 
997     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
998     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
999 
1000     Note that one can define initial index sets with any overlap via
1001     PCASMSetLocalSubdomains(); the routine
1002     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1003     if desired.
1004 
1005     Level: intermediate
1006 
1007 .keywords: PC, ASM, set, overlap
1008 
1009 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1010           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1011 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Forward to the type-specific implementation; a no-op when pc is not PCASM (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1022 
1023 /*@
1024     PCASMSetType - Sets the type of restriction and interpolation used
1025     for local problems in the additive Schwarz method.
1026 
1027     Logically Collective on PC
1028 
1029     Input Parameters:
1030 +   pc  - the preconditioner context
1031 -   type - variant of ASM, one of
1032 .vb
1033       PC_ASM_BASIC       - full interpolation and restriction
1034       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1035       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1036       PC_ASM_NONE        - local processor restriction and interpolation
1037 .ve
1038 
1039     Options Database Key:
1040 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1041 
1042     Notes: if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1043     to limit the local processor interpolation
1044 
1045     Level: intermediate
1046 
1047 .keywords: PC, ASM, set, type
1048 
1049 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1050           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1051 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Forward to the type-specific implementation; a no-op when pc is not PCASM (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1062 
1063 /*@
1064     PCASMGetType - Gets the type of restriction and interpolation used
1065     for local problems in the additive Schwarz method.
1066 
1067     Logically Collective on PC
1068 
1069     Input Parameter:
1070 .   pc  - the preconditioner context
1071 
1072     Output Parameter:
1073 .   type - variant of ASM, one of
1074 
1075 .vb
1076       PC_ASM_BASIC       - full interpolation and restriction
1077       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1078       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1079       PC_ASM_NONE        - local processor restriction and interpolation
1080 .ve
1081 
1082     Options Database Key:
1083 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1084 
1085     Level: intermediate
1086 
1087 .keywords: PC, ASM, set, type
1088 
1089 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1090           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1091 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Unlike the setters, this errors if pc does not provide the method (PetscUseMethod) */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1101 
1102 /*@
1103   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1104 
1105   Logically Collective on PC
1106 
1107   Input Parameters:
1108 + pc  - the preconditioner context
1109 - type - type of composition, one of
1110 .vb
1111   PC_COMPOSITE_ADDITIVE       - local additive combination
1112   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1113 .ve
1114 
1115   Options Database Key:
1116 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1117 
1118   Level: intermediate
1119 
1120 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1121 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* Forward to the type-specific implementation; a no-op when pc is not PCASM (PetscTryMethod) */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1132 
1133 /*@
1134   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1135 
1136   Logically Collective on PC
1137 
1138   Input Parameter:
1139 . pc  - the preconditioner context
1140 
1141   Output Parameter:
1142 . type - type of composition, one of
1143 .vb
1144   PC_COMPOSITE_ADDITIVE       - local additive combination
1145   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1146 .ve
1147 
1148   Options Database Key:
1149 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1150 
1151   Level: intermediate
1152 
1153 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1154 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* Unlike the setters, this errors if pc does not provide the method (PetscUseMethod) */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1165 
1166 /*@
1167     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1168 
1169     Logically Collective on PC
1170 
1171     Input Parameters:
1172 +   pc  - the preconditioner context
1173 -   doSort - sort the subdomain indices
1174 
1175     Level: intermediate
1176 
1177 .keywords: PC, ASM, set, type
1178 
1179 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1180           PCASMCreateSubdomains2D()
1181 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Forward to the type-specific implementation; a no-op when pc is not PCASM (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1192 
1193 /*@C
1194    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1195    this processor.
1196 
1197    Collective on PC iff first_local is requested
1198 
1199    Input Parameter:
1200 .  pc - the preconditioner context
1201 
1202    Output Parameters:
1203 +  n_local - the number of blocks on this processor or NULL
1204 .  first_local - the global number of the first block on this processor or NULL,
1205                  all processors must request or all must pass NULL
1206 -  ksp - the array of KSP contexts
1207 
1208    Note:
1209    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1210 
1211    You must call KSPSetUp() before calling PCASMGetSubKSP().
1212 
1213    Fortran note:
1214    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1215 
1216    Level: advanced
1217 
1218 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1219 
1220 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1221           PCASMCreateSubdomains2D(),
1222 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Errors if pc does not provide the method (PetscUseMethod); collective iff first_local is requested */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1232 
1233 /* -------------------------------------------------------------------------------------*/
1234 /*MC
1235    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1236            its own KSP object.
1237 
1238    Options Database Keys:
1239 +  -pc_asm_blocks <blks> - Sets total blocks
1240 .  -pc_asm_overlap <ovl> - Sets overlap
1241 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1242 -  -pc_asm_local_type [additive, multiplicative] - Sets the type of local solver composition, default is additive
1243 
1244      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1245       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1246       -pc_asm_type basic to use the standard ASM.
1247 
1248    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1249      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1250 
1251      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1252         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1253 
1254      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1255          and set the options directly on the resulting KSP object (you can access its PC
1256          with KSPGetPC())
1257 
1258    Level: beginner
1259 
1260    Concepts: additive Schwarz method
1261 
1262     References:
1263 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1264      Courant Institute, New York University Technical report
1265 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1266     Cambridge University Press.
1267 
1268 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1269            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1270            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1271 
1272 M*/
1273 
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* Subdomain counts default to PETSC_DECIDE and are fixed during PCSetUp */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  /* Per-subdomain solvers, scatters, work vectors, and index sets are created at setup time */
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  /* Defaults: restricted additive Schwarz with additive local composition */
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* Compose the type-specific implementations invoked by the PCASMXxx() API wrappers */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1328 
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  void            (*f)(void);
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible; only matrices that provide
     MATOP_GET_DIAGONAL_BLOCK can be partitioned by adjacency below */
  ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
  if (f) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* isbaij records whether the diagonal block stores one entry per bs x bs block */
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; adj takes ownership of iia and jja,
           so they are not freed here */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* spread the mbs % n leftover blocks over the first subdomains, one each */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering: sorting the permutation by the new
       numbering groups the nodes of each partition contiguously */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case: expand each block index into its bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1507 
1508 /*@C
1509    PCASMDestroySubdomains - Destroys the index sets created with
1510    PCASMCreateSubdomains(). Should be called after setting subdomains
1511    with PCASMSetLocalSubdomains().
1512 
1513    Collective
1514 
1515    Input Parameters:
1516 +  n - the number of index sets
1517 .  is - the array of index sets
1518 -  is_local - the array of local index sets, can be NULL
1519 
1520    Level: advanced
1521 
1522 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1523 
1524 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1525 @*/
1526 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1527 {
1528   PetscInt       i;
1529   PetscErrorCode ierr;
1530 
1531   PetscFunctionBegin;
1532   if (n <= 0) PetscFunctionReturn(0);
1533   if (is) {
1534     PetscValidPointer(is,2);
1535     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1536     ierr = PetscFree(is);CHKERRQ(ierr);
1537   }
1538   if (is_local) {
1539     PetscValidPointer(is_local,3);
1540     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1541     ierr = PetscFree(is_local);CHKERRQ(ierr);
1542   }
1543   PetscFunctionReturn(0);
1544 }
1545 
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* only the single-degree-of-freedom case is implemented */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  /* sweep subdomains row by row (y outer, x inner); loc_outer is the flat subdomain index */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend by overlap in y, clipped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      /* extend by overlap in x, clipped to the mesh boundary */
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* enumerate the (overlapping) subdomain in the natural row-major node ordering */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* no overlap: local subdomain equals the overlapping one; share the IS via a reference */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* rebuild idx with only the non-overlapping interior for the local IS */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1629 
1630 /*@C
1631     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1632     only) for the additive Schwarz preconditioner.
1633 
1634     Not Collective
1635 
1636     Input Parameter:
1637 .   pc - the preconditioner context
1638 
1639     Output Parameters:
1640 +   n - the number of subdomains for this processor (default value = 1)
1641 .   is - the index sets that define the subdomains for this processor
1642 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1643 
1644 
1645     Notes:
1646     The IS numbering is in the parallel, global numbering of the vector.
1647 
1648     Level: advanced
1649 
1650 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1651 
1652 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1653           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1654 @*/
1655 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1656 {
1657   PC_ASM         *osm = (PC_ASM*)pc->data;
1658   PetscErrorCode ierr;
1659   PetscBool      match;
1660 
1661   PetscFunctionBegin;
1662   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1663   PetscValidIntPointer(n,2);
1664   if (is) PetscValidPointer(is,3);
1665   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1666   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1667   if (n) *n = osm->n_local_true;
1668   if (is) *is = osm->is;
1669   if (is_local) *is_local = osm->is_local;
1670   PetscFunctionReturn(0);
1671 }
1672 
1673 /*@C
1674     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1675     only) for the additive Schwarz preconditioner.
1676 
1677     Not Collective
1678 
1679     Input Parameter:
1680 .   pc - the preconditioner context
1681 
1682     Output Parameters:
1683 +   n - the number of matrices for this processor (default value = 1)
1684 -   mat - the matrices
1685 
1686     Level: advanced
1687 
1688     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks())
1689 
1690            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1691 
1692 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1693 
1694 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1695           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1696 @*/
1697 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1698 {
1699   PC_ASM         *osm;
1700   PetscErrorCode ierr;
1701   PetscBool      match;
1702 
1703   PetscFunctionBegin;
1704   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1705   PetscValidIntPointer(n,2);
1706   if (mat) PetscValidPointer(mat,3);
1707   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1708   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1709   if (!match) {
1710     if (n) *n = 0;
1711     if (mat) *mat = NULL;
1712   } else {
1713     osm = (PC_ASM*)pc->data;
1714     if (n) *n = osm->n_local_true;
1715     if (mat) *mat = osm->pmat;
1716   }
1717   PetscFunctionReturn(0);
1718 }
1719 
1720 /*@
1721     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1722 
1723     Logically Collective
1724 
1725     Input Parameter:
1726 +   pc  - the preconditioner
1727 -   flg - boolean indicating whether to use subdomains defined by the DM
1728 
1729     Options Database Key:
1730 .   -pc_asm_dm_subdomains
1731 
1732     Level: intermediate
1733 
1734     Notes:
1735     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1736     so setting either of the first two effectively turns the latter off.
1737 
1738 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1739 
1740 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1741           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1742 @*/
1743 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1744 {
1745   PC_ASM         *osm = (PC_ASM*)pc->data;
1746   PetscErrorCode ierr;
1747   PetscBool      match;
1748 
1749   PetscFunctionBegin;
1750   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1751   PetscValidLogicalCollectiveBool(pc,flg,2);
1752   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1753   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1754   if (match) {
1755     osm->dm_subdomains = flg;
1756   }
1757   PetscFunctionReturn(0);
1758 }
1759 
1760 /*@
1761     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1762     Not Collective
1763 
1764     Input Parameter:
1765 .   pc  - the preconditioner
1766 
1767     Output Parameter:
1768 .   flg - boolean indicating whether to use subdomains defined by the DM
1769 
1770     Level: intermediate
1771 
1772 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1773 
1774 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1775           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1776 @*/
1777 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1778 {
1779   PC_ASM         *osm = (PC_ASM*)pc->data;
1780   PetscErrorCode ierr;
1781   PetscBool      match;
1782 
1783   PetscFunctionBegin;
1784   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1785   PetscValidPointer(flg,2);
1786   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1787   if (match) {
1788     if (flg) *flg = osm->dm_subdomains;
1789   }
1790   PetscFunctionReturn(0);
1791 }
1792 
1793 /*@
1794      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1795 
1796    Not Collective
1797 
1798    Input Parameter:
1799 .  pc - the PC
1800 
1801    Output Parameter:
1802 .  -pc_asm_sub_mat_type - name of matrix type
1803 
1804    Level: advanced
1805 
1806 .keywords: PC, PCASM, MatType, set
1807 
1808 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1809 @*/
1810 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1811   PetscErrorCode ierr;
1812 
1813   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1814   PetscFunctionReturn(0);
1815 }
1816 
1817 /*@
1818      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1819 
1820    Collective on Mat
1821 
1822    Input Parameters:
1823 +  pc             - the PC object
1824 -  sub_mat_type   - matrix type
1825 
1826    Options Database Key:
1827 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1828 
1829    Notes:
1830    See "${PETSC_DIR}/include/petscmat.h" for available types
1831 
1832   Level: advanced
1833 
1834 .keywords: PC, PCASM, MatType, set
1835 
1836 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1837 @*/
1838 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1839 {
1840   PetscErrorCode ierr;
1841 
1842   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1843   PetscFunctionReturn(0);
1844 }
1845