xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 2bf68e3e0f2a61f71e7c65bee250bfa1c8ce0cdb)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
/* Private context for the additive Schwarz (ASM) preconditioner. */
typedef struct {
  PetscInt   n, n_local, n_local_true;  /* see file header: n = total true subdomains over all ranks, n_local_true = this rank's count, n_local = max over ranks */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used; pmat holds the extracted subdomain matrices */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors sized to lis (union of local subdomains) */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
39 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints the ASM preconditioner state: subdomain counts,
   overlap, restriction/interpolation type, local composition type, and the
   sub-KSP solver(s). Supports ASCII and string viewers; collective on the
   viewer's communicator (the sub-viewer calls below must be matched on all
   ranks).
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Fallback texts used when overlap/subdomain counts have not been determined yet */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share the same solver configuration: print a single KSP from rank 0 only */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        /* GetSubViewer/RestoreSubViewer are collective: every rank must call them even though only rank 0 prints */
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Blocks differ: print each rank's blocks in rank order via synchronized output */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* One-line summary plus the first block's KSP for string viewers */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
101 
102 #undef __FUNCT__
103 #define __FUNCT__ "PCASMPrintSubdomains"
104 static PetscErrorCode PCASMPrintSubdomains(PC pc)
105 {
106   PC_ASM         *osm = (PC_ASM*)pc->data;
107   const char     *prefix;
108   char           fname[PETSC_MAX_PATH_LEN+1];
109   PetscViewer    viewer, sviewer;
110   char           *s;
111   PetscInt       i,j,nidx;
112   const PetscInt *idx;
113   PetscMPIInt    rank, size;
114   PetscErrorCode ierr;
115 
116   PetscFunctionBegin;
117   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
118   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
119   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
120   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
121   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
122   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
123   for (i=0; i<osm->n_local; i++) {
124     if (i < osm->n_local_true) {
125       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
126       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
127       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
128       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
129       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
130       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
131       for (j=0; j<nidx; j++) {
132         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
133       }
134       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
135       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
136       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
137       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
138       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
139       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
140       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
141       ierr = PetscFree(s);CHKERRQ(ierr);
142       if (osm->is_local) {
143         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
144         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
145         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
146         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
147         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
148         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
149         for (j=0; j<nidx; j++) {
150           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
151         }
152         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
153         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
154         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
155         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
156         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
157         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
158         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
159         ierr = PetscFree(s);CHKERRQ(ierr);
160       }
161     } else {
162       /* Participate in collective viewer calls. */
163       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
164       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
165       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
166       /* Assume either all ranks have is_local or none do. */
167       if (osm->is_local) {
168         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
169         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
170         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
171       }
172     }
173   }
174   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
175   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
176   PetscFunctionReturn(0);
177 }
178 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Sets up the additive Schwarz preconditioner:
   determines the subdomains (from the DM, from user-supplied index sets, or
   one subdomain per process), extends them by the requested overlap, creates
   one sequential KSP per local subdomain, extracts the subdomain submatrices,
   and builds the work vectors and restriction/localization/prolongation
   scatters. On repeated calls only the submatrices are refreshed (and
   rebuilt from scratch if the nonzero pattern changed).

   Collective on pc.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* If the user never chose an ASM type and the matrix is known symmetric,
       use the symmetry-preserving basic type */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatGetSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* No overlap: the non-overlapping IS is the same object, just reference it */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        /* Sub-solvers are configured with the "<pc prefix>sub_" options prefix */
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;

      /* lis = sorted union of all local subdomains; lx/ly are matching work vectors
         used to carry corrections between the sequential subdomain solves */
      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      /* restriction[i]: global vector -> local overlapping work vector x[i] */
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* Translate the non-overlapping indices into the local numbering of the
           overlapping subdomain; every is_local index must appear in is */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        /* localization[i]: overlapping solution y[i] -> non-overlapping y_local[i] */
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        /* prolongation[i]: non-overlapping y_local[i] -> global vector (reverse mode) */
        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* No separate non-overlapping sets: reuse y[i] and restriction[i] by reference */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* Pad with empty vectors/scatters up to n_local so the apply loops stay
       collective-compatible across ranks with different true subdomain counts */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] = rows of subdomain i against the union (lis) columns, used to
       propagate corrections between subdomain solves in PCApply_ASM() */
    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
444 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
445 {
446   PC_ASM             *osm = (PC_ASM*)pc->data;
447   PetscErrorCode     ierr;
448   PetscInt           i;
449   KSPConvergedReason reason;
450 
451   PetscFunctionBegin;
452   for (i=0; i<osm->n_local_true; i++) {
453     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
454     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
455     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
456       pc->failedreason = PC_SUBPC_ERROR;
457     }
458   }
459   PetscFunctionReturn(0);
460 }
461 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner: y = sum_i R_i^T A_i^{-1} R_i x
   (additive composition) or the sequential variant that feeds each block's
   correction into the next block's right-hand side (multiplicative
   composition). The VecScatterBegin/End pairs are deliberately split so
   communication overlaps the local solves; do not reorder them.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    /* Start all restrictions first so communication overlaps the solves below */
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* Restrict the overlapping solution to the non-overlapping subdomain */
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* Compute the residual correction -A*ly for the next block's rhs update above */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
553 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the additive Schwarz
   preconditioner (additive composition only): the restrict/interpolate
   limiting flags are swapped relative to PCApply_ASM() because the
   transpose reverses the roles of restriction and prolongation. The
   VecScatterBegin/End pairs are split to overlap communication with the
   local transpose solves; do not reorder them.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* Start all restrictions first so communication overlaps the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* Restrict the overlapping solution to the non-overlapping subdomain */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
604 
#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
/*
   PCReset_ASM - Frees everything built by PCSetUp_ASM() so the preconditioner can be
   set up again (e.g. with a different matrix size). The subdomain KSP objects are
   reset, not destroyed; PCDestroy_ASM() destroys them.
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  /* reset (keep) one solver per true local subdomain */
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  /* subdomain (sub)matrices extracted during setup */
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* scatters and work vectors exist for n_local slots (padded to the max over all
     processes, see the header comment of this file), not just n_local_true */
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  /* extra objects used only by the multiplicative local composition */
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  /* NOTE(review): freeing sub_mat_type here means a user-chosen subdomain matrix type
     does not survive PCReset() -- confirm this is intended */
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
654 
655 #undef __FUNCT__
656 #define __FUNCT__ "PCDestroy_ASM"
657 static PetscErrorCode PCDestroy_ASM(PC pc)
658 {
659   PC_ASM         *osm = (PC_ASM*)pc->data;
660   PetscErrorCode ierr;
661   PetscInt       i;
662 
663   PetscFunctionBegin;
664   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
665   if (osm->ksp) {
666     for (i=0; i<osm->n_local_true; i++) {
667       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
668     }
669     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
670   }
671   ierr = PetscFree(pc->data);CHKERRQ(ierr);
672   PetscFunctionReturn(0);
673 }
674 
675 #undef __FUNCT__
676 #define __FUNCT__ "PCSetFromOptions_ASM"
677 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
678 {
679   PC_ASM         *osm = (PC_ASM*)pc->data;
680   PetscErrorCode ierr;
681   PetscInt       blocks,ovl;
682   PetscBool      symset,flg;
683   PCASMType      asmtype;
684   PCCompositeType loctype;
685   char           sub_mat_type[256];
686 
687   PetscFunctionBegin;
688   /* set the type to symmetric if matrix is symmetric */
689   if (!osm->type_set && pc->pmat) {
690     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
691     if (symset && flg) osm->type = PC_ASM_BASIC;
692   }
693   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
694   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
695   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
696   if (flg) {
697     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
698     osm->dm_subdomains = PETSC_FALSE;
699   }
700   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
701   if (flg) {
702     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
703     osm->dm_subdomains = PETSC_FALSE;
704   }
705   flg  = PETSC_FALSE;
706   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
707   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
708   flg  = PETSC_FALSE;
709   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
710   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
711   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
712   if(flg){
713     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
714   }
715   ierr = PetscOptionsTail();CHKERRQ(ierr);
716   PetscFunctionReturn(0);
717 }
718 
719 /*------------------------------------------------------------------------------------*/
720 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains();
   records the caller-supplied overlapping (is) and non-overlapping (is_local)
   subdomain index sets. Only legal before PCSetUp() has been called.
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the incoming index sets BEFORE destroying the stored ones, so the
       call is safe even when the caller passes the very IS objects already stored */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      /* only is_local given: derive osm->is from it */
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            /* NOTE(review): ISDuplicate() appears to already copy the indices, which
               would make the following ISCopy() redundant -- confirm before removing */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* zero overlap: share the same IS instead of copying it */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
770 
771 #undef __FUNCT__
772 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
773 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
774 {
775   PC_ASM         *osm = (PC_ASM*)pc->data;
776   PetscErrorCode ierr;
777   PetscMPIInt    rank,size;
778   PetscInt       n;
779 
780   PetscFunctionBegin;
781   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
782   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
783 
784   /*
785      Split the subdomains equally among all processors
786   */
787   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
788   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
789   n    = N/size + ((N % size) > rank);
790   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
791   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
792   if (!pc->setupcalled) {
793     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
794 
795     osm->n_local_true = n;
796     osm->is           = 0;
797     osm->is_local     = 0;
798   }
799   PetscFunctionReturn(0);
800 }
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "PCASMSetOverlap_ASM"
804 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
805 {
806   PC_ASM *osm = (PC_ASM*)pc->data;
807 
808   PetscFunctionBegin;
809   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
810   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
811   if (!pc->setupcalled) osm->overlap = ovl;
812   PetscFunctionReturn(0);
813 }
814 
815 #undef __FUNCT__
816 #define __FUNCT__ "PCASMSetType_ASM"
817 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
818 {
819   PC_ASM *osm = (PC_ASM*)pc->data;
820 
821   PetscFunctionBegin;
822   osm->type     = type;
823   osm->type_set = PETSC_TRUE;
824   PetscFunctionReturn(0);
825 }
826 
827 #undef __FUNCT__
828 #define __FUNCT__ "PCASMGetType_ASM"
829 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
830 {
831   PC_ASM *osm = (PC_ASM*)pc->data;
832 
833   PetscFunctionBegin;
834   *type = osm->type;
835   PetscFunctionReturn(0);
836 }
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "PCASMSetLocalType_ASM"
840 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
841 {
842   PC_ASM *osm = (PC_ASM *) pc->data;
843 
844   PetscFunctionBegin;
845   osm->loctype = type;
846   PetscFunctionReturn(0);
847 }
848 
849 #undef __FUNCT__
850 #define __FUNCT__ "PCASMGetLocalType_ASM"
851 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
852 {
853   PC_ASM *osm = (PC_ASM *) pc->data;
854 
855   PetscFunctionBegin;
856   *type = osm->loctype;
857   PetscFunctionReturn(0);
858 }
859 
860 #undef __FUNCT__
861 #define __FUNCT__ "PCASMSetSortIndices_ASM"
862 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
863 {
864   PC_ASM *osm = (PC_ASM*)pc->data;
865 
866   PetscFunctionBegin;
867   osm->sort_indices = doSort;
868   PetscFunctionReturn(0);
869 }
870 
871 #undef __FUNCT__
872 #define __FUNCT__ "PCASMGetSubKSP_ASM"
873 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
874 {
875   PC_ASM         *osm = (PC_ASM*)pc->data;
876   PetscErrorCode ierr;
877 
878   PetscFunctionBegin;
879   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
880 
881   if (n_local) *n_local = osm->n_local_true;
882   if (first_local) {
883     ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
884     *first_local -= osm->n_local_true;
885   }
886   if (ksp) {
887     /* Assume that local solves are now different; not necessarily
888        true though!  This flag is used only for PCView_ASM() */
889     *ksp                   = osm->ksp;
890     osm->same_local_solves = PETSC_FALSE;
891   }
892   PetscFunctionReturn(0);
893 }
894 
895 #undef __FUNCT__
896 #define __FUNCT__ "PCASMGetSubMatType_ASM"
897 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
898 {
899   PC_ASM         *osm = (PC_ASM*)pc->data;
900 
901   PetscFunctionBegin;
902   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
903   PetscValidPointer(sub_mat_type,2);
904   *sub_mat_type = osm->sub_mat_type;
905   PetscFunctionReturn(0);
906 }
907 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSubMatType_ASM"
/*
   PCASMSetSubMatType_ASM - Implementation of PCASMSetSubMatType(); stores a private
   copy of the requested subdomain matrix type string, freeing any previous choice.
*/
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  /* duplicate the string since the caller's storage may not outlive the PC */
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
921 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    This routine must be called before PCSetUp().

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
960 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1011 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine. If MatIncreaseOverlap is used,
    use option -mat_increase_overlap when the problem size is large.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1064 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1103 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetType"
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, get, type

.seealso: PCASMSetType(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1144 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType"
/*@
  PCASMSetLocalType - Sets the type of composition used for combining the local solves in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1177 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType"
/*@
  PCASMGetLocalType - Gets the type of composition used for combining the local solves in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1212 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted before
    the subdomain matrices are extracted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1241 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1286 
1287 /* -------------------------------------------------------------------------------------*/
1288 /*MC
1289    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1290            its own KSP object.
1291 
1292    Options Database Keys:
1293 +  -pc_asm_blocks <blks> - Sets total blocks
1294 .  -pc_asm_overlap <ovl> - Sets overlap
1295 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1296 
1297      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1298       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1299       -pc_asm_type basic to use the standard ASM.
1300 
1301    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1302      than one processor. Defaults to one block per processor.
1303 
1304      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1305         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1306 
1307      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1308          and set the options directly on the resulting KSP object (you can access its PC
1309          with KSPGetPC())
1310 
1311 
1312    Level: beginner
1313 
1314    Concepts: additive Schwarz method
1315 
1316     References:
1317 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1318      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1320     Cambridge University Press.
1321 
1322 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1323            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1324            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1325 
1326 M*/
1327 
1328 #undef __FUNCT__
1329 #define __FUNCT__ "PCCreate_ASM"
1330 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1331 {
1332   PetscErrorCode ierr;
1333   PC_ASM         *osm;
1334 
1335   PetscFunctionBegin;
1336   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1337 
1338   osm->n                 = PETSC_DECIDE;
1339   osm->n_local           = 0;
1340   osm->n_local_true      = PETSC_DECIDE;
1341   osm->overlap           = 1;
1342   osm->ksp               = 0;
1343   osm->restriction       = 0;
1344   osm->localization      = 0;
1345   osm->prolongation      = 0;
1346   osm->x                 = 0;
1347   osm->y                 = 0;
1348   osm->y_local           = 0;
1349   osm->is                = 0;
1350   osm->is_local          = 0;
1351   osm->mat               = 0;
1352   osm->pmat              = 0;
1353   osm->type              = PC_ASM_RESTRICT;
1354   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1355   osm->same_local_solves = PETSC_TRUE;
1356   osm->sort_indices      = PETSC_TRUE;
1357   osm->dm_subdomains     = PETSC_FALSE;
1358   osm->sub_mat_type      = NULL;
1359 
1360   pc->data                 = (void*)osm;
1361   pc->ops->apply           = PCApply_ASM;
1362   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1363   pc->ops->setup           = PCSetUp_ASM;
1364   pc->ops->reset           = PCReset_ASM;
1365   pc->ops->destroy         = PCDestroy_ASM;
1366   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1367   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1368   pc->ops->view            = PCView_ASM;
1369   pc->ops->applyrichardson = 0;
1370 
1371   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1372   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1373   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1374   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1375   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1376   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1377   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1378   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1379   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1380   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1381   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1382   PetscFunctionReturn(0);
1383 }
1384 
1385 #undef __FUNCT__
1386 #define __FUNCT__ "PCASMCreateSubdomains"
1387 /*@C
1388    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1389    preconditioner for a any problem on a general grid.
1390 
1391    Collective
1392 
1393    Input Parameters:
1394 +  A - The global matrix operator
1395 -  n - the number of local blocks
1396 
1397    Output Parameters:
1398 .  outis - the array of index sets defining the subdomains
1399 
1400    Level: advanced
1401 
1402    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1403     from these if you use PCASMSetLocalSubdomains()
1404 
1405     In the Fortran version you must provide the array outis[] already allocated of length n.
1406 
1407 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1408 
1409 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1410 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  void            (*f)(void);
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* The local row range must be block-aligned, otherwise the blocked partitioning below is invalid */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible; only attempt it when the Mat
     implementation actually provides the MATOP_GET_DIAGONAL_BLOCK operation */
  ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
  if (f) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    /* For (S)BAIJ matrices we partition at the block level so that dof blocks are never split */
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* CURRENT and SQUARE are trivial partitioners; they would not improve on the
       contiguous-chunk fallback below, so only proceed with a real partitioner */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; MatCreateMPIAdj() takes ownership of
           iia and jja, so they are freed when adj is destroyed, not here */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* Allocate the output array of index sets; caller frees it with PCASMDestroySubdomains() */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* distribute the remainder (mbs % n) one block each to the first subdomains */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering: sort local node ids by their new global
       number so that nodes of the same partition become contiguous in indices[] */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* expand each block index into its bs point indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1565 
1566 #undef __FUNCT__
1567 #define __FUNCT__ "PCASMDestroySubdomains"
1568 /*@C
1569    PCASMDestroySubdomains - Destroys the index sets created with
1570    PCASMCreateSubdomains(). Should be called after setting subdomains
1571    with PCASMSetLocalSubdomains().
1572 
1573    Collective
1574 
1575    Input Parameters:
1576 +  n - the number of index sets
1577 .  is - the array of index sets
1578 -  is_local - the array of local index sets, can be NULL
1579 
1580    Level: advanced
1581 
1582 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1583 
1584 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1585 @*/
1586 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1587 {
1588   PetscInt       i;
1589   PetscErrorCode ierr;
1590 
1591   PetscFunctionBegin;
1592   if (n <= 0) PetscFunctionReturn(0);
1593   if (is) {
1594     PetscValidPointer(is,2);
1595     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1596     ierr = PetscFree(is);CHKERRQ(ierr);
1597   }
1598   if (is_local) {
1599     PetscValidPointer(is_local,3);
1600     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1601     ierr = PetscFree(is_local);CHKERRQ(ierr);
1602   }
1603   PetscFunctionReturn(0);
1604 }
1605 
1606 #undef __FUNCT__
1607 #define __FUNCT__ "PCASMCreateSubdomains2D"
1608 /*@
1609    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1610    preconditioner for a two-dimensional problem on a regular grid.
1611 
1612    Not Collective
1613 
1614    Input Parameters:
1615 +  m, n - the number of mesh points in the x and y directions
1616 .  M, N - the number of subdomains in the x and y directions
1617 .  dof - degrees of freedom per node
1618 -  overlap - overlap in mesh lines
1619 
1620    Output Parameters:
1621 +  Nsub - the number of subdomains created
1622 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1623 -  is_local - array of index sets defining non-overlapping subdomains
1624 
1625    Note:
1626    Presently PCASMCreateSubdomains2D() is valid only for sequential
1627    preconditioners.  More general related routines are
1628    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1629 
1630    Level: advanced
1631 
1632 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1633 
1634 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1635           PCASMSetOverlap()
1636 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* Only one degree of freedom per node is supported by this routine */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  /* Sweep the M x N subdomain grid; grid points are numbered row-major, i.e.
     point (row ii, column jj) has global index m*ii + jj */
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* extend the subdomain by 'overlap' mesh lines, clipped to the domain boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      /* gather the global indices of the overlapping rectangle */
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* with no overlap the local subdomain IS is identical to the overlapping one,
           so share the same IS object via an extra reference */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* reuse idx (the non-overlapping rectangle is never larger than the
           overlapping one) to build the local, non-overlapping index set */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1691 
1692 #undef __FUNCT__
1693 #define __FUNCT__ "PCASMGetLocalSubdomains"
1694 /*@C
1695     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1696     only) for the additive Schwarz preconditioner.
1697 
1698     Not Collective
1699 
1700     Input Parameter:
1701 .   pc - the preconditioner context
1702 
1703     Output Parameters:
1704 +   n - the number of subdomains for this processor (default value = 1)
1705 .   is - the index sets that define the subdomains for this processor
1706 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1707 
1708 
1709     Notes:
1710     The IS numbering is in the parallel, global numbering of the vector.
1711 
1712     Level: advanced
1713 
1714 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1715 
1716 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1717           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1718 @*/
1719 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1720 {
1721   PC_ASM         *osm = (PC_ASM*)pc->data;
1722   PetscErrorCode ierr;
1723   PetscBool      match;
1724 
1725   PetscFunctionBegin;
1726   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1727   PetscValidIntPointer(n,2);
1728   if (is) PetscValidPointer(is,3);
1729   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1730   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1731   if (n) *n = osm->n_local_true;
1732   if (is) *is = osm->is;
1733   if (is_local) *is_local = osm->is_local;
1734   PetscFunctionReturn(0);
1735 }
1736 
1737 #undef __FUNCT__
1738 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1739 /*@C
1740     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1741     only) for the additive Schwarz preconditioner.
1742 
1743     Not Collective
1744 
1745     Input Parameter:
1746 .   pc - the preconditioner context
1747 
1748     Output Parameters:
1749 +   n - the number of matrices for this processor (default value = 1)
1750 -   mat - the matrices
1751 
1752 
1753     Level: advanced
1754 
1755     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1756 
1757            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1758 
1759 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1760 
1761 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1762           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1763 @*/
1764 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1765 {
1766   PC_ASM         *osm;
1767   PetscErrorCode ierr;
1768   PetscBool      match;
1769 
1770   PetscFunctionBegin;
1771   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1772   PetscValidIntPointer(n,2);
1773   if (mat) PetscValidPointer(mat,3);
1774   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1775   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1776   if (!match) {
1777     if (n) *n = 0;
1778     if (mat) *mat = NULL;
1779   } else {
1780     osm = (PC_ASM*)pc->data;
1781     if (n) *n = osm->n_local_true;
1782     if (mat) *mat = osm->pmat;
1783   }
1784   PetscFunctionReturn(0);
1785 }
1786 
1787 #undef __FUNCT__
1788 #define __FUNCT__ "PCASMSetDMSubdomains"
1789 /*@
1790     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1791     Logically Collective
1792 
1793     Input Parameter:
1794 +   pc  - the preconditioner
1795 -   flg - boolean indicating whether to use subdomains defined by the DM
1796 
1797     Options Database Key:
1798 .   -pc_asm_dm_subdomains <bool>
1799 
1800     Level: intermediate
1801 
1802     Notes:
1803     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1804     so setting either of the first two effectively turns the latter off.
1805 
1806 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1807 
1808 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1809           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1810 @*/
1811 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1812 {
1813   PC_ASM         *osm = (PC_ASM*)pc->data;
1814   PetscErrorCode ierr;
1815   PetscBool      match;
1816 
1817   PetscFunctionBegin;
1818   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1819   PetscValidLogicalCollectiveBool(pc,flg,2);
1820   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1821   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1822   if (match) {
1823     osm->dm_subdomains = flg;
1824   }
1825   PetscFunctionReturn(0);
1826 }
1827 
1828 #undef __FUNCT__
1829 #define __FUNCT__ "PCASMGetDMSubdomains"
1830 /*@
1831     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1832     Not Collective
1833 
1834     Input Parameter:
1835 .   pc  - the preconditioner
1836 
1837     Output Parameter:
1838 .   flg - boolean indicating whether to use subdomains defined by the DM
1839 
1840     Level: intermediate
1841 
1842 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1843 
1844 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1845           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1846 @*/
1847 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1848 {
1849   PC_ASM         *osm = (PC_ASM*)pc->data;
1850   PetscErrorCode ierr;
1851   PetscBool      match;
1852 
1853   PetscFunctionBegin;
1854   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1855   PetscValidPointer(flg,2);
1856   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1857   if (match) {
1858     if (flg) *flg = osm->dm_subdomains;
1859   }
1860   PetscFunctionReturn(0);
1861 }
1862 
1863 
1864 #undef __FUNCT__
1865 #define __FUNCT__ "PCASMGetSubMatType"
1866 /*@
1867 PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1868 
1869    Not Collective
1870 
1871    Input Parameter:
1872 .  pc - the PC
1873 
1874    Output Parameter:
1875 .  sub_mat_type - name of matrix type
1876 
1877    Level: advanced
1878 
1879 .keywords: PC, PCASM, MatType, set
1880 
1881 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1882 @*/
1883 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1884   PetscErrorCode ierr;
1885 
1886   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1887   PetscFunctionReturn(0);
1888 }
1889 
1890 #undef __FUNCT__
1891 #define __FUNCT__ "PCASMSetSubMatType"
1892 /*@
1893  PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1894 
1895    Collective on Mat
1896 
1897    Input Parameters:
1898 +  pc             - the PC object
1899 -  sub_mat_type   - matrix type
1900 
1901    Options Database Key:
1902 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1903 
1904    Notes:
1905    See "${PETSC_DIR}/include/petscmat.h" for available types
1906 
1907   Level: advanced
1908 
1909 .keywords: PC, PCASM, MatType, set
1910 
1911 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1912 @*/
1913 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1914 {
1915   PetscErrorCode ierr;
1916 
1917   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1918   PetscFunctionReturn(0);
1919 }
1920