xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision a5b7ff6b211d69bb2e6523989771c5d3a0ae7db2)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
/*
   Private context for the additive Schwarz (ASM) preconditioner.
   See the header comment of this file for the meaning of n, n_local and
   n_local_true.
*/
typedef struct {
  PetscInt   n, n_local, n_local_true; /* total / padded-local / true-local subdomain counts */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;
39 
#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
/*
   PCView_ASM - Prints a human-readable summary of the ASM preconditioner to an
   ASCII or string viewer: block counts, overlap, restriction/interpolation
   type, and the sub-KSP solver(s).

   Collective: the synchronized-printf and sub-viewer calls below must be
   executed by all ranks of the viewer's communicator in matching order.
*/
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    /* Default texts are used until the corresponding value has been set */
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      /* All blocks share the same solver configuration: view only block 0 on rank 0 */
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        /* Collective: every rank must restore the sub-viewer, even those that did not view */
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      /* Per-rank, per-block output: use the synchronized-printf protocol */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    /* Compact one-line summary plus the (shared) first sub-KSP */
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
101 
102 #undef __FUNCT__
103 #define __FUNCT__ "PCASMPrintSubdomains"
104 static PetscErrorCode PCASMPrintSubdomains(PC pc)
105 {
106   PC_ASM         *osm = (PC_ASM*)pc->data;
107   const char     *prefix;
108   char           fname[PETSC_MAX_PATH_LEN+1];
109   PetscViewer    viewer, sviewer;
110   char           *s;
111   PetscInt       i,j,nidx;
112   const PetscInt *idx;
113   PetscMPIInt    rank, size;
114   PetscErrorCode ierr;
115 
116   PetscFunctionBegin;
117   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
118   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
119   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
120   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
121   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
122   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
123   for (i=0; i<osm->n_local; i++) {
124     if (i < osm->n_local_true) {
125       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
126       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
127       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
128       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
129       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
130       ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
131       for (j=0; j<nidx; j++) {
132         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
133       }
134       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
135       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
136       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
137       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
138       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
139       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
140       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
141       ierr = PetscFree(s);CHKERRQ(ierr);
142       if (osm->is_local) {
143         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
144         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
145         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
146         ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
147         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
148         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
149         for (j=0; j<nidx; j++) {
150           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
151         }
152         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
153         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
154         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
155         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
156         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
157         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
158         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
159         ierr = PetscFree(s);CHKERRQ(ierr);
160       }
161     } else {
162       /* Participate in collective viewer calls. */
163       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
164       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
165       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
166       /* Assume either all ranks have is_local or none do. */
167       if (osm->is_local) {
168         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
169         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
170         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
171       }
172     }
173   }
174   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
175   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
176   PetscFunctionReturn(0);
177 }
178 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
/*
   PCSetUp_ASM - Builds the ASM data structures: the subdomain index sets, the
   sequential submatrices, the scatters between the global vector and each
   subdomain, and one sequential KSP per local subdomain.

   On the first call (!pc->setupcalled) everything is created; on subsequent
   calls only the submatrices are re-extracted, and they are re-created from
   scratch when the nonzero pattern of the operator changed.
*/
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    /* If the user never chose a PCASMType and the matrix is known symmetric,
       use the symmetric (basic) variant */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        /* PCASMSetLocalSubdomains() took its own references; release ours */
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          /* No overlap: the non-overlapping IS can simply share the same object */
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        /* sub-solver options live under the "<pc prefix>sub_" options prefix */
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;

      /* lis is the union of all local overlapping subdomains, used to carry
         the running residual between subdomain solves */
      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if needbe) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if(!pc->setupcalled){
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      /* restriction[i]: global vec -> the i-th overlapping subdomain work vec */
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        /* Translate is_local[i] from global numbering into positions within is[i];
           every local index must be found (is_local must be a subset of is) */
        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        /* localization[i]: overlapping solution -> non-overlapping part only */
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        /* prolongation[i]: non-overlapping subdomain values back into the global vec */
        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        /* Without is_local, prolongation is just the reverse of restriction and
           y_local aliases y; share the objects via extra references */
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    /* Padding subdomains (beyond n_local_true) get empty vectors/scatters so the
       collective scatter calls in PCApply_ASM stay matched across ranks */
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    /* lmats[i] couples subdomain i's rows with the union (lis) columns; used to
       update the residual between the multiplicative subdomain solves */
    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
433 
434 #undef __FUNCT__
435 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
436 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
437 {
438   PC_ASM             *osm = (PC_ASM*)pc->data;
439   PetscErrorCode     ierr;
440   PetscInt           i;
441   KSPConvergedReason reason;
442 
443   PetscFunctionBegin;
444   for (i=0; i<osm->n_local_true; i++) {
445     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
446     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
447     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
448       pc->failedreason = PC_SUBPC_ERROR;
449     }
450   }
451   PetscFunctionReturn(0);
452 }
453 
#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
/*
   PCApply_ASM - Applies the additive Schwarz preconditioner:
   restrict x onto each subdomain, solve with the subdomain KSP, and
   prolong the results back into y (summing contributions), either all at
   once (additive) or sweeping over the subdomains (multiplicative).

   The restriction Begin/End calls are deliberately split across loops so
   communication overlaps the local solves.
*/
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    /* start all restrictions so communication overlaps the solves below */
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        /* keep only the non-overlapping part of the subdomain solution */
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs: pull the residual correction (accumulated in lx) into x[i] */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        /* compute -A * (correction so far) on the union of subdomains and stage
           it in lx as the rhs update for the next subdomain */
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}
545 
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
/*
   PCApplyTranspose_ASM - Applies the transpose of the (additive) ASM
   preconditioner: restrict x, solve each subdomain with KSPSolveTranspose(),
   prolong the results back into y with summation.
*/
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  /* start all restrictions so communication overlaps the solves below */
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      /* keep only the non-overlapping part of the subdomain solution */
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
596 
#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
/*
   PCReset_ASM - Frees all matrix-dependent data (submatrices, scatters, work
   vectors, index sets) so the PC can be set up again with a new operator.
   The sub-KSP objects are reset but not destroyed; PCDestroy_ASM() destroys
   them.
*/
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  /* osm->restriction being non-NULL implies all arrays below were allocated
     together in PCSetUp_ASM() */
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  /* NOTE(review): freeing sub_mat_type here means a user-chosen submatrix type
     does not survive a PCReset(); verify this is intended rather than belonging
     in PCDestroy_ASM() only */
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}
646 
647 #undef __FUNCT__
648 #define __FUNCT__ "PCDestroy_ASM"
649 static PetscErrorCode PCDestroy_ASM(PC pc)
650 {
651   PC_ASM         *osm = (PC_ASM*)pc->data;
652   PetscErrorCode ierr;
653   PetscInt       i;
654 
655   PetscFunctionBegin;
656   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
657   if (osm->ksp) {
658     for (i=0; i<osm->n_local_true; i++) {
659       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
660     }
661     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
662   }
663   ierr = PetscFree(pc->data);CHKERRQ(ierr);
664   PetscFunctionReturn(0);
665 }
666 
667 #undef __FUNCT__
668 #define __FUNCT__ "PCSetFromOptions_ASM"
669 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
670 {
671   PC_ASM         *osm = (PC_ASM*)pc->data;
672   PetscErrorCode ierr;
673   PetscInt       blocks,ovl;
674   PetscBool      symset,flg;
675   PCASMType      asmtype;
676   PCCompositeType loctype;
677   char           sub_mat_type[256];
678 
679   PetscFunctionBegin;
680   /* set the type to symmetric if matrix is symmetric */
681   if (!osm->type_set && pc->pmat) {
682     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
683     if (symset && flg) osm->type = PC_ASM_BASIC;
684   }
685   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
686   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
687   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
688   if (flg) {
689     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
690     osm->dm_subdomains = PETSC_FALSE;
691   }
692   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
693   if (flg) {
694     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
695     osm->dm_subdomains = PETSC_FALSE;
696   }
697   flg  = PETSC_FALSE;
698   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
699   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
700   flg  = PETSC_FALSE;
701   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
702   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
703   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
704   if(flg){
705     ierr = PCASMSetSubMatType(pc,sub_mat_type);
706   }
707   ierr = PetscOptionsTail();CHKERRQ(ierr);
708   PetscFunctionReturn(0);
709 }
710 
711 /*------------------------------------------------------------------------------------*/
712 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
/*
   PCASMSetLocalSubdomains_ASM - Implementation of PCASMSetLocalSubdomains().

   Stores the n user-supplied overlapping subdomain index sets is[] and/or the
   non-overlapping index sets is_local[].  This routine takes its own reference
   on every supplied IS; the caller keeps ownership of the arrays themselves.
   Must be called before PCSetUp().
*/
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the incoming index sets BEFORE destroying the current ones,
       in case the caller passes back sets this PC already holds */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* no overlapping sets supplied: derive osm->is[] from the non-overlapping ones */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            /* zero overlap: the overlapping set can simply share the non-overlapping IS */
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
765 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
766 {
767   PC_ASM         *osm = (PC_ASM*)pc->data;
768   PetscErrorCode ierr;
769   PetscMPIInt    rank,size;
770   PetscInt       n;
771 
772   PetscFunctionBegin;
773   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
774   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
775 
776   /*
777      Split the subdomains equally among all processors
778   */
779   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
780   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
781   n    = N/size + ((N % size) > rank);
782   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
783   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
784   if (!pc->setupcalled) {
785     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
786 
787     osm->n_local_true = n;
788     osm->is           = 0;
789     osm->is_local     = 0;
790   }
791   PetscFunctionReturn(0);
792 }
793 
794 #undef __FUNCT__
795 #define __FUNCT__ "PCASMSetOverlap_ASM"
796 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
797 {
798   PC_ASM *osm = (PC_ASM*)pc->data;
799 
800   PetscFunctionBegin;
801   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
802   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
803   if (!pc->setupcalled) osm->overlap = ovl;
804   PetscFunctionReturn(0);
805 }
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "PCASMSetType_ASM"
809 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
810 {
811   PC_ASM *osm = (PC_ASM*)pc->data;
812 
813   PetscFunctionBegin;
814   osm->type     = type;
815   osm->type_set = PETSC_TRUE;
816   PetscFunctionReturn(0);
817 }
818 
819 #undef __FUNCT__
820 #define __FUNCT__ "PCASMGetType_ASM"
821 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
822 {
823   PC_ASM *osm = (PC_ASM*)pc->data;
824 
825   PetscFunctionBegin;
826   *type = osm->type;
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "PCASMSetLocalType_ASM"
832 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
833 {
834   PC_ASM *osm = (PC_ASM *) pc->data;
835 
836   PetscFunctionBegin;
837   osm->loctype = type;
838   PetscFunctionReturn(0);
839 }
840 
841 #undef __FUNCT__
842 #define __FUNCT__ "PCASMGetLocalType_ASM"
843 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
844 {
845   PC_ASM *osm = (PC_ASM *) pc->data;
846 
847   PetscFunctionBegin;
848   *type = osm->loctype;
849   PetscFunctionReturn(0);
850 }
851 
852 #undef __FUNCT__
853 #define __FUNCT__ "PCASMSetSortIndices_ASM"
854 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
855 {
856   PC_ASM *osm = (PC_ASM*)pc->data;
857 
858   PetscFunctionBegin;
859   osm->sort_indices = doSort;
860   PetscFunctionReturn(0);
861 }
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "PCASMGetSubKSP_ASM"
865 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
866 {
867   PC_ASM         *osm = (PC_ASM*)pc->data;
868   PetscErrorCode ierr;
869 
870   PetscFunctionBegin;
871   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");
872 
873   if (n_local) *n_local = osm->n_local_true;
874   if (first_local) {
875     ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
876     *first_local -= osm->n_local_true;
877   }
878   if (ksp) {
879     /* Assume that local solves are now different; not necessarily
880        true though!  This flag is used only for PCView_ASM() */
881     *ksp                   = osm->ksp;
882     osm->same_local_solves = PETSC_FALSE;
883   }
884   PetscFunctionReturn(0);
885 }
886 
887 #undef __FUNCT__
888 #define __FUNCT__ "PCASMGetSubMatType_ASM"
889 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
890 {
891   PC_ASM         *osm = (PC_ASM*)pc->data;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
895   PetscValidPointer(sub_mat_type,2);
896   *sub_mat_type = osm->sub_mat_type;
897   PetscFunctionReturn(0);
898 }
899 
900 #undef __FUNCT__
901 #define __FUNCT__ "PCASMSetSubMatType_ASM"
902 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
903 {
904   PetscErrorCode    ierr;
905   PC_ASM            *osm = (PC_ASM*)pc->data;
906 
907   PetscFunctionBegin;
908   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
909   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
910   ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
911   PetscFunctionReturn(0);
912 }
913 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    This routine must be called before PCSetUp() (or KSPSetUp() on the outer solver).

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
952 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1003 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine. If MatIncreaseOverlap is used,
    use option -mat_increase_overlap when the problem size is large.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1056 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes:
    The default is PC_ASM_RESTRICT (restricted additive Schwarz).

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMGetType(), PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1095 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetType"
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Not Collective

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, get, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetType(), PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1136 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType"
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Notes:
  The default is PC_COMPOSITE_ADDITIVE.

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1169 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType"
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Not Collective

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1204 
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Notes:
    The default is PETSC_TRUE (indices are sorted).

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1233 
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1278 
1279 /* -------------------------------------------------------------------------------------*/
1280 /*MC
1281    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1282            its own KSP object.
1283 
1284    Options Database Keys:
1285 +  -pc_asm_blocks <blks> - Sets total blocks
1286 .  -pc_asm_overlap <ovl> - Sets overlap
1287 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1288 
1289      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1290       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1291       -pc_asm_type basic to use the standard ASM.
1292 
1293    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1294      than one processor. Defaults to one block per processor.
1295 
1296      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1297         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1298 
1299      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1300          and set the options directly on the resulting KSP object (you can access its PC
1301          with KSPGetPC())
1302 
1303 
1304    Level: beginner
1305 
1306    Concepts: additive Schwarz method
1307 
1308     References:
1309 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1310      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Cambridge University Press.
1313 
1314 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1315            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1316            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1317 
1318 M*/
1319 
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
/*
   PCCreate_ASM - Creates the PC_ASM context, installs the default option
   values, fills in the PC function table, and registers the PCASM-specific
   methods (composed functions) used by the public PCASMSetXXX()/GetXXX()
   interface.
*/
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* default options: subdomain count decided at setup, overlap of 1,
     restricted ASM with additive local composition, sorted indices */
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  /* register implementations dispatched to by the public PCASM* interface */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1376 
1377 #undef __FUNCT__
1378 #define __FUNCT__ "PCASMCreateSubdomains"
1379 /*@C
1380    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1381    preconditioner for a any problem on a general grid.
1382 
1383    Collective
1384 
1385    Input Parameters:
1386 +  A - The global matrix operator
1387 -  n - the number of local blocks
1388 
1389    Output Parameters:
1390 .  outis - the array of index sets defining the subdomains
1391 
1392    Level: advanced
1393 
1394    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1395     from these if you use PCASMSetLocalSubdomains()
1396 
1397     In the Fortran version you must provide the array outis[] already allocated of length n.
1398 
1399 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1400 
1401 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1402 @*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscErrorCode  (*f)(Mat,Mat*);
  PetscMPIInt     size;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  /* The local row range must consist of whole blocks, otherwise subdomains cannot respect the block structure */
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  /* Only types composing MatGetDiagonalBlock_C support MatGetDiagonalBlock(); in serial the matrix is its own diagonal block */
  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
  if (f) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  } else if (size == 1) {
    Ad = A;
  }
  if (Ad) {
    /* For (S)BAIJ we partition block rows and expand the indices by bs afterwards */
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    /* "current" and "square" partitionings are trivial; fall back to contiguous chunks for them */
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      /* isbaij selects block-compressed storage so we walk block rows, not scalar rows */
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        /* NOTE(review): iia/jja are not freed here; presumably MatCreateMPIAdj() takes
           ownership and frees them when adj is destroyed -- confirm against MATMPIADJ docs */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  /* Caller receives ownership of the array and its index sets (see PCASMDestroySubdomains()) */
  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      /* Spread the remainder: the first (mbs % n) subdomains get one extra block row */
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    /* Sorting the identity permutation by the new numbering groups the (block) rows of each subdomain together */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      /* Expand each block index into its bs consecutive scalar indices */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      /* count[i] consecutive entries of indices[] belong to subdomain i */
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}
1561 
1562 #undef __FUNCT__
1563 #define __FUNCT__ "PCASMDestroySubdomains"
1564 /*@C
1565    PCASMDestroySubdomains - Destroys the index sets created with
1566    PCASMCreateSubdomains(). Should be called after setting subdomains
1567    with PCASMSetLocalSubdomains().
1568 
1569    Collective
1570 
1571    Input Parameters:
1572 +  n - the number of index sets
1573 .  is - the array of index sets
1574 -  is_local - the array of local index sets, can be NULL
1575 
1576    Level: advanced
1577 
1578 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1579 
1580 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1581 @*/
1582 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1583 {
1584   PetscInt       i;
1585   PetscErrorCode ierr;
1586 
1587   PetscFunctionBegin;
1588   if (n <= 0) PetscFunctionReturn(0);
1589   if (is) {
1590     PetscValidPointer(is,2);
1591     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1592     ierr = PetscFree(is);CHKERRQ(ierr);
1593   }
1594   if (is_local) {
1595     PetscValidPointer(is_local,3);
1596     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1597     ierr = PetscFree(is_local);CHKERRQ(ierr);
1598   }
1599   PetscFunctionReturn(0);
1600 }
1601 
1602 #undef __FUNCT__
1603 #define __FUNCT__ "PCASMCreateSubdomains2D"
1604 /*@
1605    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1606    preconditioner for a two-dimensional problem on a regular grid.
1607 
1608    Not Collective
1609 
1610    Input Parameters:
1611 +  m, n - the number of mesh points in the x and y directions
1612 .  M, N - the number of subdomains in the x and y directions
1613 .  dof - degrees of freedom per node
1614 -  overlap - overlap in mesh lines
1615 
1616    Output Parameters:
1617 +  Nsub - the number of subdomains created
1618 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1619 -  is_local - array of index sets defining non-overlapping subdomains
1620 
1621    Note:
1622    Presently PCASMCreateSubdomains2D() is valid only for sequential
1623    preconditioners.  More general related routines are
1624    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1625 
1626    Level: advanced
1627 
1628 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1629 
1630 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1631           PCASMSetOverlap()
1632 @*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  /* Only one degree of freedom per grid point is currently supported */
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");

  /* Tile the m x n grid into an M x N array of rectangular subdomains */
  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain; remainder spread over the first (n % N) strips */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    /* Grow the strip by the requested overlap, clamped to the mesh boundary */
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      /* Collect the natural (row-major, idx = m*row + col) indices of the overlapped rectangle */
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        /* Overlapping and non-overlapping subdomains coincide: share one IS via an extra reference */
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        /* Rebuild idx with only the interior (non-overlapped) rectangle; the buffer is large
           enough because nidx >= width*height */
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  /* Ensure every overlapping index set is sorted */
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
1687 
1688 #undef __FUNCT__
1689 #define __FUNCT__ "PCASMGetLocalSubdomains"
1690 /*@C
1691     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1692     only) for the additive Schwarz preconditioner.
1693 
1694     Not Collective
1695 
1696     Input Parameter:
1697 .   pc - the preconditioner context
1698 
1699     Output Parameters:
1700 +   n - the number of subdomains for this processor (default value = 1)
1701 .   is - the index sets that define the subdomains for this processor
1702 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1703 
1704 
1705     Notes:
1706     The IS numbering is in the parallel, global numbering of the vector.
1707 
1708     Level: advanced
1709 
1710 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1711 
1712 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1713           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1714 @*/
1715 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1716 {
1717   PC_ASM         *osm = (PC_ASM*)pc->data;
1718   PetscErrorCode ierr;
1719   PetscBool      match;
1720 
1721   PetscFunctionBegin;
1722   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1723   PetscValidIntPointer(n,2);
1724   if (is) PetscValidPointer(is,3);
1725   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1726   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1727   if (n) *n = osm->n_local_true;
1728   if (is) *is = osm->is;
1729   if (is_local) *is_local = osm->is_local;
1730   PetscFunctionReturn(0);
1731 }
1732 
1733 #undef __FUNCT__
1734 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1735 /*@C
1736     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1737     only) for the additive Schwarz preconditioner.
1738 
1739     Not Collective
1740 
1741     Input Parameter:
1742 .   pc - the preconditioner context
1743 
1744     Output Parameters:
1745 +   n - the number of matrices for this processor (default value = 1)
1746 -   mat - the matrices
1747 
1748 
1749     Level: advanced
1750 
1751     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1752 
1753            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1754 
1755 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1756 
1757 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1758           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1759 @*/
1760 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1761 {
1762   PC_ASM         *osm;
1763   PetscErrorCode ierr;
1764   PetscBool      match;
1765 
1766   PetscFunctionBegin;
1767   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1768   PetscValidIntPointer(n,2);
1769   if (mat) PetscValidPointer(mat,3);
1770   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1771   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1772   if (!match) {
1773     if (n) *n = 0;
1774     if (mat) *mat = NULL;
1775   } else {
1776     osm = (PC_ASM*)pc->data;
1777     if (n) *n = osm->n_local_true;
1778     if (mat) *mat = osm->pmat;
1779   }
1780   PetscFunctionReturn(0);
1781 }
1782 
1783 #undef __FUNCT__
1784 #define __FUNCT__ "PCASMSetDMSubdomains"
1785 /*@
1786     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1787     Logically Collective
1788 
1789     Input Parameters:
1790 +   pc  - the preconditioner
1791 -   flg - boolean indicating whether to use subdomains defined by the DM
1792 
1793     Options Database Key:
1794 .   -pc_asm_dm_subdomains
1795 
1796     Level: intermediate
1797 
1798     Notes:
1799     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1800     so setting either of the first two effectively turns the latter off.
1801 
1802 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1803 
1804 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1805           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1806 @*/
1807 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1808 {
1809   PC_ASM         *osm = (PC_ASM*)pc->data;
1810   PetscErrorCode ierr;
1811   PetscBool      match;
1812 
1813   PetscFunctionBegin;
1814   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1815   PetscValidLogicalCollectiveBool(pc,flg,2);
1816   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1817   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1818   if (match) {
1819     osm->dm_subdomains = flg;
1820   }
1821   PetscFunctionReturn(0);
1822 }
1823 
1824 #undef __FUNCT__
1825 #define __FUNCT__ "PCASMGetDMSubdomains"
1826 /*@
1827     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1828     Not Collective
1829 
1830     Input Parameter:
1831 .   pc  - the preconditioner
1832 
1833     Output Parameter:
1834 .   flg - boolean indicating whether to use subdomains defined by the DM
1835 
1836     Level: intermediate
1837 
1838 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1839 
1840 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1841           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1842 @*/
1843 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1844 {
1845   PC_ASM         *osm = (PC_ASM*)pc->data;
1846   PetscErrorCode ierr;
1847   PetscBool      match;
1848 
1849   PetscFunctionBegin;
1850   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1851   PetscValidPointer(flg,2);
1852   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1853   if (match) {
1854     if (flg) *flg = osm->dm_subdomains;
1855   }
1856   PetscFunctionReturn(0);
1857 }
1858 
1859 
1860 #undef __FUNCT__
1861 #define __FUNCT__ "PCASMGetSubMatType"
1862 /*@
1863 PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1864 
1865    Not Collective
1866 
1867    Input Parameter:
1868 .  pc - the PC
1869 
1870    Output Parameter:
1871 .  sub_mat_type - name of matrix type
1872 
1873    Level: advanced
1874 
1875 .keywords: PC, PCASM, MatType, set
1876 
1877 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1878 @*/
1879 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){
1880   PetscErrorCode ierr;
1881 
1882   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1883   PetscFunctionReturn(0);
1884 }
1885 
1886 #undef __FUNCT__
1887 #define __FUNCT__ "PCASMSetSubMatType"
1888 /*@
1889  PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1890 
1891    Collective on Mat
1892 
1893    Input Parameters:
1894 +  pc             - the PC object
1895 -  sub_mat_type   - matrix type
1896 
1897    Options Database Key:
1898 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1899 
1900    Notes:
1901    See "${PETSC_DIR}/include/petscmat.h" for available types
1902 
1903   Level: advanced
1904 
1905 .keywords: PC, PCASM, MatType, set
1906 
1907 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1908 @*/
1909 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1910 {
1911   PetscErrorCode ierr;
1912 
1913   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1914   PetscFunctionReturn(0);
1915 }
1916