xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision d2fc88d660d899f5d302fca28cab2ba057217d62)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
/*
   Private context for the additive Schwarz (ASM) preconditioner.

   Subdomain counts (see also the note at the top of this file):
     n            - total number of true subdomains across all processes
     n_local      - maximum over all processes of n_local_true
     n_local_true - number of subdomains actually stored on this process
*/
typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user; negative means "user-defined overlap" (do not extend) */
  KSP        *ksp;                /* linear solvers for each block; length n_local_true */
  VecScatter restriction;         /* mapping from global vector to overlapping (process) subdomain */
  VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain; one per block */
  VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms; NULL otherwise */
  Vec        lx, ly;              /* process-local work vectors (RHS and solution over the union of subdomains) */
  Vec        *x,*y;  		  /* per-block work vectors (RHS and solution) */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* pmat holds the extracted submatrices; mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves (additive or multiplicative) */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
} PC_ASM;
39 
/*
   PCView_ASM - Prints the state of the ASM preconditioner (block counts,
   overlap, composition type, and the per-block KSP solvers) to an ASCII or
   string viewer.

   Collective: the synchronized/sub-viewer calls below must be executed by
   all ranks of the viewer's communicator in the same order.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Defaults are shown when overlap/block count have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share one solver configuration: print only rank 0's first KSP */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        /* Collective: all ranks must restore, even those that did not view */
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Solvers differ per block: every rank prints all of its local blocks */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line summary for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
99 
100 static PetscErrorCode PCASMPrintSubdomains(PC pc)
101 {
102   PC_ASM         *osm = (PC_ASM*)pc->data;
103   const char     *prefix;
104   char           fname[PETSC_MAX_PATH_LEN+1];
105   PetscViewer    viewer, sviewer;
106   char           *s;
107   PetscInt       i,j,nidx;
108   const PetscInt *idx;
109   PetscMPIInt    rank, size;
110   PetscErrorCode ierr;
111 
112   PetscFunctionBegin;
113   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
114   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
115   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
116   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
117   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
118   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
119   for (i=0; i<osm->n_local; i++) {
120     if (i < osm->n_local_true) {
121       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
123       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
124       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
126       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
127       for (j=0; j<nidx; j++) {
128         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
129       }
130       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
131       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
132       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
133       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
135       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
137       ierr = PetscFree(s);CHKERRQ(ierr);
138       if (osm->is_local) {
139         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
140         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
141         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
142         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
143         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
144         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
145         for (j=0; j<nidx; j++) {
146           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
147         }
148         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
149         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
150         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
151         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
152         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
153         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscFree(s);CHKERRQ(ierr);
156       }
157     } else {
158       /* Participate in collective viewer calls. */
159       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
160       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
161       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
162       /* Assume either all ranks have is_local or none do. */
163       if (osm->is_local) {
164         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
165         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
166         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
167       }
168     }
169   }
170   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
171   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
172   PetscFunctionReturn(0);
173 }
174 
/*
   PCSetUp_ASM - Builds the ASM data structures.

   On the first setup this: determines the subdomains (from the DM if
   dm_subdomains allows, from user-set index sets, or defaulting to one
   subdomain per process), extends them by the requested overlap, creates one
   KSP per local block, and builds the scatters between the global vector,
   the process-local union of subdomains (lis/lx/ly), and the per-block work
   vectors.  On subsequent setups only the submatrices are re-extracted
   (reused when the nonzero pattern is unchanged).

   Collective on pc.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    PetscInt m; /* NOTE(review): shadows the outer m; harmless here, but worth removing */

    /* For matrices known to be symmetric, basic (symmetric) ASM preserves symmetry */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; drop ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      /* Record the pre-overlap subdomains as the non-overlapping index sets */
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* No overlap: is_local can simply share the same IS */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        /* sub-solvers are configured with the -<prefix>sub_ option prefix */
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }

    /* lis = sorted union of all local overlapping subdomains; lx/ly are work vectors over it */
    ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
    ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
    ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);

    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);

    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    /* lprolongation is only needed for the restricted-additive variant */
    if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);

    /* restriction: global vector -> process-local union vector lx */
    ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
    ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
    ierr = ISDestroy(&isl);CHKERRQ(ierr);


    for (i=0; i<osm->n_local_true; ++i) {
      ISLocalToGlobalMapping ltog;
      IS                     isll;
      const PetscInt         *idx_is;
      PetscInt               *idx_lis,nout;

      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);

      /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
      /* Translate the global indices of is[i] into local positions within lis */
      ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
      if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
      ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isll);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overalapping is_local[i] entries */
        ISLocalToGlobalMapping ltog;
        IS                     isll,isll_local;
        const PetscInt         *idx_local;
        PetscInt               *idx1, *idx2, nout;

        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);

        /* idx1: positions of the non-overlapping entries within the overlapping block is[i] */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);

        /* idx2: positions of the same entries within the process-local union lis */
        ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);

        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);

        ierr = ISDestroy(&isll);CHKERRQ(ierr);
        ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
      }
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    /* Rectangular blocks is[i] x lis used to update later RHSs with earlier solutions */
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
441 
442 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
443 {
444   PC_ASM             *osm = (PC_ASM*)pc->data;
445   PetscErrorCode     ierr;
446   PetscInt           i;
447   KSPConvergedReason reason;
448 
449   PetscFunctionBegin;
450   for (i=0; i<osm->n_local_true; i++) {
451     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
452     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
453     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
454       pc->failedreason = PC_SUBPC_ERROR;
455     }
456   }
457   PetscFunctionReturn(0);
458 }
459 
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: y = M^{-1} x,
   where each local block is solved with its own KSP and the results are
   combined additively or multiplicatively (per osm->loctype) within the
   process, then summed across processes.

   Input:  x - global right-hand side
   Output: y - global preconditioned result (zeroed on entry)
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){
    /* zero the global and the local solutions */
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);

    /* Copy the global RHS to local RHS including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);

    /* Restrict local RHS to the overlapping 0-block RHS */
    ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);

    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {

      /* solve the overlapping i-block */
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);

      if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution (only for restrictive additive) */
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
      }
      else{ /* interpolate the overalapping i-block solution to the local solution */
        ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
      }

      if (i < n_local_true-1) {
        /* Restrict local RHS to the overlapping (i+1)-block RHS */
        ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);

        if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){
          /* udpdate the overlapping (i+1)-block RHS using the current local solution:
             x[i+1] <- x[i+1] - lmats[i+1]*ly (Gauss-Seidel-like sweep over the blocks) */
          ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
          ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr);
        }
      }
    }
    /* Add the local solution to the global solution including the ghost nodes */
    ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
  }else{
    SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
529 
530 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
531 {
532   PC_ASM         *osm = (PC_ASM*)pc->data;
533   PetscErrorCode ierr;
534   PetscInt       i,n_local_true = osm->n_local_true;
535   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
536 
537   PetscFunctionBegin;
538   /*
539      Support for limiting the restriction or interpolation to only local
540      subdomain values (leaving the other values 0).
541 
542      Note: these are reversed from the PCApply_ASM() because we are applying the
543      transpose of the three terms
544   */
545 
546   if (!(osm->type & PC_ASM_INTERPOLATE)) {
547     forward = SCATTER_FORWARD_LOCAL;
548     /* have to zero the work RHS since scatter may leave some slots empty */
549     ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
550   }
551   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
552 
553   /* zero the global and the local solutions */
554   ierr = VecZeroEntries(y);CHKERRQ(ierr);
555   ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
556 
557   /* Copy the global RHS to local RHS including the ghost nodes */
558   ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
559   ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
560 
561   /* Restrict local RHS to the overlapping 0-block RHS */
562   ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
563   ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);
564 
565   /* do the local solves */
566   for (i = 0; i < n_local_true; ++i) {
567 
568     /* solve the overlapping i-block */
569     ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
570 
571     if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution */
572      ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
573      ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
574     }
575     else{ /* interpolate the overalapping i-block solution to the local solution */
576       ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
577       ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
578     }
579 
580     if (i < n_local_true-1) {
581       /* Restrict local RHS to the overlapping (i+1)-block RHS */
582       ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
583       ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
584     }
585   }
586   /* Add the local solution to the global solution including the ghost nodes */
587   ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
588   ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
589 
590   PetscFunctionReturn(0);
591 
592 }
593 
/*
   PCReset_ASM - Frees everything built by PCSetUp_ASM() (submatrices,
   scatters, work vectors, index sets) but keeps the KSP objects alive
   (they are only KSPReset()-ed) so options applied to them survive a
   re-setup.  Called by PCDestroy_ASM() and when the operators change size.
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* lrestriction doubles as the "scatters were created" flag */
  if (osm->lrestriction) {
    ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
      if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
    if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);

  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  /* PCASMDestroySubdomains() freed the arrays; clear the dangling pointers */
  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
639 
/*
   PCDestroy_ASM - Destroys the ASM context: first resets all setup-time data,
   then destroys each block's KSP solver and frees the PC_ASM structure.
*/
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr); /* releases scatters, work vectors, submatrices, index sets */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
657 
658 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
659 {
660   PC_ASM         *osm = (PC_ASM*)pc->data;
661   PetscErrorCode ierr;
662   PetscInt       blocks,ovl;
663   PetscBool      symset,flg;
664   PCASMType      asmtype;
665   PCCompositeType loctype;
666   char           sub_mat_type[256];
667 
668   PetscFunctionBegin;
669   /* set the type to symmetric if matrix is symmetric */
670   if (!osm->type_set && pc->pmat) {
671     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
672     if (symset && flg) osm->type = PC_ASM_BASIC;
673   }
674   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
675   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
676   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
677   if (flg) {
678     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
679     osm->dm_subdomains = PETSC_FALSE;
680   }
681   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
682   if (flg) {
683     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
684     osm->dm_subdomains = PETSC_FALSE;
685   }
686   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
687   if (flg) {
688     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
689     osm->dm_subdomains = PETSC_FALSE;
690   }
691   flg  = PETSC_FALSE;
692   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
693   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
694   flg  = PETSC_FALSE;
695   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
696   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
697   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
698   if(flg){
699     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
700   }
701   ierr = PetscOptionsTail();CHKERRQ(ierr);
702   PetscFunctionReturn(0);
703 }
704 
705 /*------------------------------------------------------------------------------------*/
706 
/*
   PCASMSetLocalSubdomains_ASM - Stores the caller-supplied subdomain index
   sets for this process: is[] are the (possibly overlapping) subdomains,
   is_local[] the non-overlapping parts (either may be NULL).

   Must be called before PCSetUp(); once setup has run, only a call with the
   same n and no index sets is accepted (and is then a no-op).
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the incoming sets before destroying the stored ones, so the
       call stays correct even if the caller passes the currently stored sets */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* no overlapping sets were given: derive osm->is from the non-overlapping ones */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* no overlap extension will happen: share the index set instead of copying it */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
754 
755 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
756 {
757   PC_ASM         *osm = (PC_ASM*)pc->data;
758   PetscErrorCode ierr;
759   PetscMPIInt    rank,size;
760   PetscInt       n;
761 
762   PetscFunctionBegin;
763   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
764   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
765 
766   /*
767      Split the subdomains equally among all processors
768   */
769   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
770   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
771   n    = N/size + ((N % size) > rank);
772   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
773   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
774   if (!pc->setupcalled) {
775     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
776 
777     osm->n_local_true = n;
778     osm->is           = 0;
779     osm->is_local     = 0;
780   }
781   PetscFunctionReturn(0);
782 }
783 
784 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
785 {
786   PC_ASM *osm = (PC_ASM*)pc->data;
787 
788   PetscFunctionBegin;
789   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
790   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
791   if (!pc->setupcalled) osm->overlap = ovl;
792   PetscFunctionReturn(0);
793 }
794 
795 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
796 {
797   PC_ASM *osm = (PC_ASM*)pc->data;
798 
799   PetscFunctionBegin;
800   osm->type     = type;
801   osm->type_set = PETSC_TRUE;
802   PetscFunctionReturn(0);
803 }
804 
/* PCASMGetType_ASM - Returns the restriction/interpolation variant currently selected. */
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}
813 
814 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
815 {
816   PC_ASM *osm = (PC_ASM *) pc->data;
817 
818   PetscFunctionBegin;
819   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
820   osm->loctype = type;
821   PetscFunctionReturn(0);
822 }
823 
/* PCASMGetLocalType_ASM - Returns the composition type (additive or multiplicative) used for the local solves. */
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}
832 
/* PCASMSetSortIndices_ASM - Records whether the subdomain index sets are to be sorted. */
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
841 
/*
   PCASMGetSubKSP_ASM - Returns the array of block solvers and, optionally,
   the global index of this process's first block.

   Collective when first_local is requested (performs an MPI_Scan).
*/
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* MPI_Scan gives the inclusive prefix sum of block counts; subtracting
       this process's own count converts it to the exclusive sum, i.e. the
       global number of the first local block */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
863 
/* PCASMGetSubMatType_ASM - Returns the matrix type requested for the subdomain matrices (NULL if never set). */
static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}
874 
/* PCASMSetSubMatType_ASM - Stores a private copy of the requested subdomain matrix type string. */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr); /* drop any previously stored type string */
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
886 
887 /*@C
888     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
889 
890     Collective on PC
891 
892     Input Parameters:
893 +   pc - the preconditioner context
894 .   n - the number of subdomains for this processor (default value = 1)
895 .   is - the index set that defines the subdomains for this processor
896          (or NULL for PETSc to determine subdomains)
897 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
898          (or NULL to not provide these)
899 
900     Options Database Key:
901     To set the total number of subdomain blocks rather than specify the
902     index sets, use the option
903 .    -pc_asm_local_blocks <blks> - Sets local blocks
904 
905     Notes:
906     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
907 
908     By default the ASM preconditioner uses 1 block per processor.
909 
910     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
911 
912     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
913     back to form the global solution (this is the standard restricted additive Schwarz method)
914 
915     If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
916     no code to handle that case.
917 
918     Level: advanced
919 
920 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
921 
922 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
923           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
924 @*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC (PCASMSetLocalSubdomains_ASM for PCASM); PetscTryMethod() is a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
934 
935 /*@C
936     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
937     additive Schwarz preconditioner.  Either all or no processors in the
938     PC communicator must call this routine, with the same index sets.
939 
940     Collective on PC
941 
942     Input Parameters:
943 +   pc - the preconditioner context
944 .   N  - the number of subdomains for all processors
945 .   is - the index sets that define the subdomains for all processors
946          (or NULL to ask PETSc to determine the subdomains)
947 -   is_local - the index sets that define the local part of the subdomains for this processor
948          (or NULL to not provide this information)
949 
950     Options Database Key:
951     To set the total number of subdomain blocks rather than specify the
952     index sets, use the option
953 .    -pc_asm_blocks <blks> - Sets total blocks
954 
955     Notes:
956     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
957 
958     By default the ASM preconditioner uses 1 block per processor.
959 
960     These index sets cannot be destroyed until after completion of the
961     linear solves for which the ASM preconditioner is being used.
962 
963     Use PCASMSetLocalSubdomains() to set local subdomains.
964 
965     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
966 
967     Level: advanced
968 
969 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
970 
971 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
972           PCASMCreateSubdomains2D()
973 @*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC (PCASMSetTotalSubdomains_ASM for PCASM); PetscTryMethod() is a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
983 
984 /*@
985     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
986     additive Schwarz preconditioner.  Either all or no processors in the
987     PC communicator must call this routine.
988 
989     Logically Collective on PC
990 
991     Input Parameters:
992 +   pc  - the preconditioner context
993 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
994 
995     Options Database Key:
996 .   -pc_asm_overlap <ovl> - Sets overlap
997 
998     Notes:
999     By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
1001     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1002 
1003     The overlap defaults to 1, so if one desires that no additional
1004     overlap be computed beyond what may have been set with a call to
1005     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1006     must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
1008     internally by PETSc, and using an overlap of 0 would result in an ASM
1009     variant that is equivalent to the block Jacobi preconditioner.
1010 
1011     The default algorithm used by PETSc to increase overlap is fast, but not scalable,
1012     use the option -mat_increase_overlap_scalable when the problem and number of processes is large.
1013 
1014     Note that one can define initial index sets with any overlap via
1015     PCASMSetLocalSubdomains(); the routine
1016     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1017     if desired.
1018 
1019     Level: intermediate
1020 
1021 .keywords: PC, ASM, set, overlap
1022 
1023 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1024           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1025 @*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* dispatch to the implementation composed on the PC (PCASMSetOverlap_ASM for PCASM); PetscTryMethod() is a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1036 
1037 /*@
1038     PCASMSetType - Sets the type of restriction and interpolation used
1039     for local problems in the additive Schwarz method.
1040 
1041     Logically Collective on PC
1042 
1043     Input Parameters:
1044 +   pc  - the preconditioner context
1045 -   type - variant of ASM, one of
1046 .vb
1047       PC_ASM_BASIC       - full interpolation and restriction
1048       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1049       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1050       PC_ASM_NONE        - local processor restriction and interpolation
1051 .ve
1052 
1053     Options Database Key:
1054 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1055 
1056     Notes:
1057     if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1058     to limit the local processor interpolation
1059 
1060     Level: intermediate
1061 
1062 .keywords: PC, ASM, set, type
1063 
1064 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1065           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1066 @*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatch to the implementation composed on the PC (PCASMSetType_ASM for PCASM); PetscTryMethod() is a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1077 
1078 /*@
1079     PCASMGetType - Gets the type of restriction and interpolation used
1080     for local problems in the additive Schwarz method.
1081 
1082     Logically Collective on PC
1083 
1084     Input Parameter:
1085 .   pc  - the preconditioner context
1086 
1087     Output Parameter:
1088 .   type - variant of ASM, one of
1089 
1090 .vb
1091       PC_ASM_BASIC       - full interpolation and restriction
1092       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1093       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1094       PC_ASM_NONE        - local processor restriction and interpolation
1095 .ve
1096 
1097     Options Database Key:
1098 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1099 
1100     Level: intermediate
1101 
1102 .keywords: PC, ASM, set, type
1103 
1104 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1105           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1106 @*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the composed implementation; PetscUseMethod() errors if the PC type does not provide it */
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1116 
1117 /*@
1118   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1119 
1120   Logically Collective on PC
1121 
1122   Input Parameters:
1123 + pc  - the preconditioner context
1124 - type - type of composition, one of
1125 .vb
1126   PC_COMPOSITE_ADDITIVE       - local additive combination
1127   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1128 .ve
1129 
1130   Options Database Key:
1131 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1132 
1133   Level: intermediate
1134 
1135 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1136 @*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the implementation composed on the PC (PCASMSetLocalType_ASM for PCASM); PetscTryMethod() is a no-op for other PC types */
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1147 
1148 /*@
1149   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1150 
1151   Logically Collective on PC
1152 
1153   Input Parameter:
1154 . pc  - the preconditioner context
1155 
1156   Output Parameter:
1157 . type - type of composition, one of
1158 .vb
1159   PC_COMPOSITE_ADDITIVE       - local additive combination
1160   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1161 .ve
1162 
1163   Options Database Key:
1164 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1165 
1166   Level: intermediate
1167 
1168 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1169 @*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* dispatch to the composed implementation; PetscUseMethod() errors if the PC type does not provide it */
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1180 
1181 /*@
1182     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1183 
1184     Logically Collective on PC
1185 
1186     Input Parameters:
1187 +   pc  - the preconditioner context
1188 -   doSort - sort the subdomain indices
1189 
1190     Level: intermediate
1191 
1192 .keywords: PC, ASM, set, type
1193 
1194 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1195           PCASMCreateSubdomains2D()
1196 @*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* dispatch to the implementation composed on the PC (PCASMSetSortIndices_ASM for PCASM); PetscTryMethod() is a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1207 
1208 /*@C
1209    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1210    this processor.
1211 
1212    Collective on PC iff first_local is requested
1213 
1214    Input Parameter:
1215 .  pc - the preconditioner context
1216 
1217    Output Parameters:
1218 +  n_local - the number of blocks on this processor or NULL
1219 .  first_local - the global number of the first block on this processor or NULL,
1220                  all processors must request or all must pass NULL
1221 -  ksp - the array of KSP contexts
1222 
1223    Note:
1224    After PCASMGetSubKSP() the array of KSPes is not to be freed.
1225 
1226    You must call KSPSetUp() before calling PCASMGetSubKSP().
1227 
1228    Fortran note:
1229    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1230 
1231    Level: advanced
1232 
1233 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1234 
1235 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1236           PCASMCreateSubdomains2D(),
1237 @*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the composed implementation; PetscUseMethod() errors if the PC type does not provide it */
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1247 
1248 /* -------------------------------------------------------------------------------------*/
1249 /*MC
1250    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1251            its own KSP object.
1252 
1253    Options Database Keys:
1254 +  -pc_asm_blocks <blks> - Sets total blocks
1255 .  -pc_asm_overlap <ovl> - Sets overlap
1256 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1257 -  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1258 
1259      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1260       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1261       -pc_asm_type basic to use the standard ASM.
1262 
1263    Notes:
1264     Each processor can have one or more blocks, but a block cannot be shared by more
1265      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1266 
1267      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1268         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1269 
1270      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1271          and set the options directly on the resulting KSP object (you can access its PC
1272          with KSPGetPC())
1273 
1274    Level: beginner
1275 
1276    Concepts: additive Schwarz method
1277 
1278     References:
1279 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1280      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decomposition: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1282     Cambridge University Press.
1283 
1284 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1285            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1286            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1287 
1288 M*/
1289 
/*
   PCCreate_ASM - Creates the additive Schwarz (PCASM) preconditioner context.

   Installs the default parameters visible below (overlap 1, restricted ASM,
   additive local composition, sorted indices), fills in the PC function
   table, and composes the type-specific PCASMXXX_C methods so the generic
   PCASMSetXXX()/PCASMGetXXX() entry points can dispatch here.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* default parameters; PETSC_DECIDE leaves the block counts to be chosen later */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->lprolongation     = 0;
  osm->lrestriction      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  /* PC function table */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* compose the ASM-specific implementations used by the generic PCASM entry points */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1343 
1344 /*@C
1345    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1347 
1348    Collective
1349 
1350    Input Parameters:
1351 +  A - The global matrix operator
1352 -  n - the number of local blocks
1353 
1354    Output Parameters:
1355 .  outis - the array of index sets defining the subdomains
1356 
1357    Level: advanced
1358 
1359    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1360     from these if you use PCASMSetLocalSubdomains()
1361 
1362     In the Fortran version you must provide the array outis[] already allocated of length n.
1363 
1364 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1365 
1366 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1367 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* the local row range must be aligned to block boundaries, otherwise a block
     would straddle two processes and could not be assigned to one subdomain */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* remember whether the diagonal block is (S)BAIJ: the partitioning below
       then works on blocks rather than points */
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* the "current" and "square" partitioners would not improve on the trivial
       contiguous split, so they are treated like "no partitioner" */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; adj takes ownership of iia and
           jja, so they must not be freed here (freed when adj is destroyed) */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* the caller releases this array and its index sets with PCASMDestroySubdomains() */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* spread the mbs blocks as evenly as possible: the first (mbs % n)
         subdomains receive one extra block of bs rows */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    /* permute the identity by the new numbering so that the nodes of each
       partition become contiguous in indices[] */
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1521 
1522 /*@C
1523    PCASMDestroySubdomains - Destroys the index sets created with
1524    PCASMCreateSubdomains(). Should be called after setting subdomains
1525    with PCASMSetLocalSubdomains().
1526 
1527    Collective
1528 
1529    Input Parameters:
1530 +  n - the number of index sets
1531 .  is - the array of index sets
1532 -  is_local - the array of local index sets, can be NULL
1533 
1534    Level: advanced
1535 
1536 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1537 
1538 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1539 @*/
1540 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1541 {
1542   PetscInt       i;
1543   PetscErrorCode ierr;
1544 
1545   PetscFunctionBegin;
1546   if (n <= 0) PetscFunctionReturn(0);
1547   if (is) {
1548     PetscValidPointer(is,2);
1549     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1550     ierr = PetscFree(is);CHKERRQ(ierr);
1551   }
1552   if (is_local) {
1553     PetscValidPointer(is_local,3);
1554     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1555     ierr = PetscFree(is_local);CHKERRQ(ierr);
1556   }
1557   PetscFunctionReturn(0);
1558 }
1559 
1560 /*@
1561    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1562    preconditioner for a two-dimensional problem on a regular grid.
1563 
1564    Not Collective
1565 
1566    Input Parameters:
1567 +  m, n - the number of mesh points in the x and y directions
1568 .  M, N - the number of subdomains in the x and y directions
1569 .  dof - degrees of freedom per node
1570 -  overlap - overlap in mesh lines
1571 
1572    Output Parameters:
1573 +  Nsub - the number of subdomains created
1574 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1575 -  is_local - array of index sets defining non-overlapping subdomains
1576 
1577    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1579    preconditioners.  More general related routines are
1580    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1581 
1582    Level: advanced
1583 
1584 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1585 
1586 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1587           PCASMSetOverlap()
1588 @*/
1589 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1590 {
1591   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1592   PetscErrorCode ierr;
1593   PetscInt       nidx,*idx,loc,ii,jj,count;
1594 
1595   PetscFunctionBegin;
1596   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1597 
1598   *Nsub     = N*M;
1599   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1600   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1601   ystart    = 0;
1602   loc_outer = 0;
1603   for (i=0; i<N; i++) {
1604     height = n/N + ((n % N) > i); /* height of subdomain */
1605     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1606     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1607     yright = ystart + height + overlap; if (yright > n) yright = n;
1608     xstart = 0;
1609     for (j=0; j<M; j++) {
1610       width = m/M + ((m % M) > j); /* width of subdomain */
1611       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1612       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1613       xright = xstart + width + overlap; if (xright > m) xright = m;
1614       nidx   = (xright - xleft)*(yright - yleft);
1615       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1616       loc    = 0;
1617       for (ii=yleft; ii<yright; ii++) {
1618         count = m*ii + xleft;
1619         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1620       }
1621       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1622       if (overlap == 0) {
1623         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1624 
1625         (*is_local)[loc_outer] = (*is)[loc_outer];
1626       } else {
1627         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1628           for (jj=xstart; jj<xstart+width; jj++) {
1629             idx[loc++] = m*ii + jj;
1630           }
1631         }
1632         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1633       }
1634       ierr    = PetscFree(idx);CHKERRQ(ierr);
1635       xstart += width;
1636       loc_outer++;
1637     }
1638     ystart += height;
1639   }
1640   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1641   PetscFunctionReturn(0);
1642 }
1643 
1644 /*@C
1645     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1646     only) for the additive Schwarz preconditioner.
1647 
1648     Not Collective
1649 
1650     Input Parameter:
1651 .   pc - the preconditioner context
1652 
1653     Output Parameters:
1654 +   n - the number of subdomains for this processor (default value = 1)
1655 .   is - the index sets that define the subdomains for this processor
1656 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1657 
1658 
1659     Notes:
1660     The IS numbering is in the parallel, global numbering of the vector.
1661 
1662     Level: advanced
1663 
1664 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1665 
1666 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1667           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1668 @*/
1669 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1670 {
1671   PC_ASM         *osm = (PC_ASM*)pc->data;
1672   PetscErrorCode ierr;
1673   PetscBool      match;
1674 
1675   PetscFunctionBegin;
1676   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1677   PetscValidIntPointer(n,2);
1678   if (is) PetscValidPointer(is,3);
1679   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1680   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1681   if (n) *n = osm->n_local_true;
1682   if (is) *is = osm->is;
1683   if (is_local) *is_local = osm->is_local;
1684   PetscFunctionReturn(0);
1685 }
1686 
1687 /*@C
1688     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1689     only) for the additive Schwarz preconditioner.
1690 
1691     Not Collective
1692 
1693     Input Parameter:
1694 .   pc - the preconditioner context
1695 
1696     Output Parameters:
1697 +   n - the number of matrices for this processor (default value = 1)
1698 -   mat - the matrices
1699 
1700     Level: advanced
1701 
1702     Notes:
    Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1704 
1705            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1706 
1707 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1708 
1709 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1710           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1711 @*/
1712 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1713 {
1714   PC_ASM         *osm;
1715   PetscErrorCode ierr;
1716   PetscBool      match;
1717 
1718   PetscFunctionBegin;
1719   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1720   PetscValidIntPointer(n,2);
1721   if (mat) PetscValidPointer(mat,3);
1722   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1723   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1724   if (!match) {
1725     if (n) *n = 0;
1726     if (mat) *mat = NULL;
1727   } else {
1728     osm = (PC_ASM*)pc->data;
1729     if (n) *n = osm->n_local_true;
1730     if (mat) *mat = osm->pmat;
1731   }
1732   PetscFunctionReturn(0);
1733 }
1734 
1735 /*@
1736     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1737 
1738     Logically Collective
1739 
    Input Parameters:
1741 +   pc  - the preconditioner
1742 -   flg - boolean indicating whether to use subdomains defined by the DM
1743 
1744     Options Database Key:
.   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1746 
1747     Level: intermediate
1748 
1749     Notes:
1750     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1751     so setting either of the first two effectively turns the latter off.
1752 
1753 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1754 
1755 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1756           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1757 @*/
1758 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1759 {
1760   PC_ASM         *osm = (PC_ASM*)pc->data;
1761   PetscErrorCode ierr;
1762   PetscBool      match;
1763 
1764   PetscFunctionBegin;
1765   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1766   PetscValidLogicalCollectiveBool(pc,flg,2);
1767   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1768   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1769   if (match) {
1770     osm->dm_subdomains = flg;
1771   }
1772   PetscFunctionReturn(0);
1773 }
1774 
1775 /*@
1776     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1777     Not Collective
1778 
1779     Input Parameter:
1780 .   pc  - the preconditioner
1781 
1782     Output Parameter:
1783 .   flg - boolean indicating whether to use subdomains defined by the DM
1784 
1785     Level: intermediate
1786 
1787 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1788 
1789 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1790           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1791 @*/
1792 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1793 {
1794   PC_ASM         *osm = (PC_ASM*)pc->data;
1795   PetscErrorCode ierr;
1796   PetscBool      match;
1797 
1798   PetscFunctionBegin;
1799   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1800   PetscValidPointer(flg,2);
1801   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1802   if (match) {
1803     if (flg) *flg = osm->dm_subdomains;
1804   }
1805   PetscFunctionReturn(0);
1806 }
1807 
1808 /*@
1809      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1810 
1811    Not Collective
1812 
1813    Input Parameter:
1814 .  pc - the PC
1815 
1816    Output Parameter:
.  sub_mat_type - name of matrix type
1818 
1819    Level: advanced
1820 
1821 .keywords: PC, PCASM, MatType, set
1822 
1823 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1824 @*/
1825 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1826   PetscErrorCode ierr;
1827 
1828   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1829   PetscFunctionReturn(0);
1830 }
1831 
1832 /*@
1833      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1834 
1835    Collective on Mat
1836 
1837    Input Parameters:
1838 +  pc             - the PC object
1839 -  sub_mat_type   - matrix type
1840 
1841    Options Database Key:
1842 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1843 
1844    Notes:
1845    See "${PETSC_DIR}/include/petscmat.h" for available types
1846 
1847   Level: advanced
1848 
1849 .keywords: PC, PCASM, MatType, set
1850 
1851 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1852 @*/
1853 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1854 {
1855   PetscErrorCode ierr;
1856 
1857   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1858   PetscFunctionReturn(0);
1859 }
1860