xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision bebe2cf65d55febe21a5af8db2bd2e168caaa2e7)
1 /*
2   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3   In this version each processor may intersect multiple subdomains and any subdomain may
4   intersect multiple processors.  Intersections of subdomains with processors are called *local
5   subdomains*.
6 
7        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
8        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
10 */
11 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
12 #include <petscdm.h>
13 
/* Private context for the GASM preconditioner.  See the file header comment for the meaning of N, n, nmax. */
typedef struct {
  PetscInt   N,n,nmax;            /* total (N), this-rank (n), and max-per-rank (nmax) counts of local subdomains */
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
  VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
  IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
  IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType type;                /* use reduced interpolation, restriction or both */
  PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
  PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;           /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;          /* whether DM is allowed to define subdomains */
} PC_GASM;
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "PCGASMSubdomainView_Private"
35 static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
36 {
37   PC_GASM        *osm = (PC_GASM*)pc->data;
38   PetscInt       j,nidx;
39   const PetscInt *idx;
40   PetscViewer    sviewer;
41   char           *cidx;
42   PetscErrorCode ierr;
43 
44   PetscFunctionBegin;
45   if (i < 0 || i > osm->n) SETERRQ2(PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must nonnegative and less than %D", i, osm->n);
46   /* Inner subdomains. */
47   ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
48   /*
49    No more than 15 characters per index plus a space.
50    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
51    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
52    For nidx == 0, the whole string 16 '\0'.
53    */
54   ierr = PetscMalloc1(16*(nidx+1)+1, &cidx);CHKERRQ(ierr);
55   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
56   ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
57   for (j = 0; j < nidx; ++j) {
58     ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
59   }
60   ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
61   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
62   ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
63   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
64   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
65   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
66   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
67   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
68   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
69   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
70   ierr = PetscFree(cidx);CHKERRQ(ierr);
71   /* Outer subdomains. */
72   ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
73   /*
74    No more than 15 characters per index plus a space.
75    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
76    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
77    For nidx == 0, the whole string 16 '\0'.
78    */
79   ierr = PetscMalloc1(16*(nidx+1)+1, &cidx);CHKERRQ(ierr);
80   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
81   ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
82   for (j = 0; j < nidx; ++j) {
83     ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
84   }
85   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
86   ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
87   ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
88   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
89   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
90   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
91   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
92   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
93   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
94   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
95   ierr = PetscFree(cidx);CHKERRQ(ierr);
96   PetscFunctionReturn(0);
97 }
98 
#undef __FUNCT__
#define __FUNCT__ "PCGASMPrintSubdomains"
/*
   Print the inner and outer index sets of every subdomain, in a global
   numbering order, to the file named by -pc_gasm_print_subdomains (default
   "stdout").  Collective on pc: each iteration of the printing loop ends in a
   barrier so that output from different subdomain subcommunicators does not
   interleave.
*/
static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscInt       i, l, d, count, gcount, *permutation, *numbering;
  PetscBool      found;
  PetscViewer    viewer, sviewer = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Compute a global numbering of the locally-supported subdomains (via their outer ISs)
     and a permutation that visits them in increasing global number. */
  ierr = PetscMalloc2(osm->n, &permutation, osm->n, &numbering);CHKERRQ(ierr);
  for (i = 0; i < osm->n; ++i) permutation[i] = i;
  ierr = PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
  if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  /*
   Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
   the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
  */
  ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
  l    = 0;
  for (count = 0; count < gcount; ++count) {
    /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
    if (l<osm->n) {
      d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
      if (numbering[d] == count) {
        ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ++l;
      }
    }
    /* Keep all ranks in lockstep so at most one subdomain prints per round. */
    ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
143 
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "PCView_GASM"
147 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
148 {
149   PC_GASM        *osm = (PC_GASM*)pc->data;
150   const char     *prefix;
151   PetscErrorCode ierr;
152   PetscMPIInt    rank, size;
153   PetscInt       i,bsz;
154   PetscBool      iascii,view_subdomains=PETSC_FALSE;
155   PetscViewer    sviewer;
156   PetscInt       count, l, gcount, *numbering, *permutation;
157   char           overlap[256]     = "user-defined overlap";
158   char           gsubdomains[256] = "unknown total number of subdomains";
159   char           lsubdomains[256] = "unknown number of local  subdomains";
160   char           msubdomains[256] = "unknown max number of local subdomains";
161 
162   PetscFunctionBegin;
163   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
164   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
165   ierr = PetscMalloc2(osm->n, &permutation, osm->n, &numbering);CHKERRQ(ierr);
166   for (i = 0; i < osm->n; ++i) permutation[i] = i;
167   ierr = PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
168   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
169 
170   if (osm->overlap >= 0) {
171     ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
172   }
173   ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
174   if (osm->N > 0) {
175     ierr = PetscSNPrintf(lsubdomains, sizeof(gsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
176   }
177   if (osm->nmax > 0) {
178     ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
179   }
180 
181   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
182   ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,NULL);CHKERRQ(ierr);
183 
184   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
185   if (iascii) {
186     /*
187      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
188      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
189      collectively on the comm.
190      */
191     ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
192     ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
193     ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
194     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
195     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
196     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
197     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
198     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
199     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",rank,size,osm->n);CHKERRQ(ierr);
200     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
201     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
202     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
203     ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
204     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
205     ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
206     /* Make sure that everybody waits for the banner to be printed. */
207     ierr = MPI_Barrier(PetscObjectComm((PetscObject)viewer));CHKERRQ(ierr);
208     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
209     l = 0;
210     for (count = 0; count < gcount; ++count) {
211       PetscMPIInt srank, ssize;
212       if (l<osm->n) {
213         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
214         if (numbering[d] == count) {
215           ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
216           ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
217           ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
218           ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
219           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
220           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d:%d] (subcomm [%d:%d]) local subdomain number %D, local size = %D\n",rank,size,srank,ssize,d,bsz);CHKERRQ(ierr);
221           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
222           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
223           if (view_subdomains) {
224             ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
225           }
226           if (!pc->setupcalled) {
227             PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
228           } else {
229             ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
230           }
231           ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
232           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
233           ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
234           ++l;
235         }
236       }
237       ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
238     }
239     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
240   }
241   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
242   PetscFunctionReturn(0);
243 }
244 
245 
246 
247 
248 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
/*
   Set up the GASM preconditioner: determine the subdomains (from the user,
   the DM, or one per process), build the merged work vectors and the
   restriction scatters onto the disjoint unions of inner and outer
   subdomains, extract the subdomain matrices, and create/configure one KSP
   per locally-supported subdomain.  On subsequent calls (pc->setupcalled set)
   only the submatrices and solver operators are refreshed.
*/
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i;
  PetscMPIInt    rank, size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            x,y;
  PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
  const PetscInt *oidxi;    /* Indices from the i-th subdomain local outer subdomain.             */
  PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
  PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
  IS             gois;      /* Disjoint union the global indices of outer subdomains.             */
  IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
  PetscScalar    *gxarray, *gyarray;
  PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
  DM             *subdomain_dm = NULL;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  if (!pc->setupcalled) {

    /* If the user did not choose a type and the matrix is known symmetric, use the symmetric (basic) variant. */
    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_GASM_BASIC;
    }

    /*
     If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
     The total number of subdomains, osm->N is not necessarily set, might be PETSC_DECIDE, and then will have to be calculated from osm->n.
     */
    if (osm->n == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_subdomains, d;
        char      **subdomain_names;
        IS        *inner_subdomain_is, *outer_subdomain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
        if (num_subdomains) {
          ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
        }
        /* PCGASMSetSubdomains takes its own references; release everything the DM handed back. */
        for (d = 0; d < num_subdomains; ++d) {
          if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
          if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
          if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
          if (subdomain_dm)       {ierr = DMDestroy(&subdomain_dm[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
        ierr = PetscFree(subdomain_dm);CHKERRQ(ierr);
      }
      if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
        osm->nmax = osm->n = 1;
        ierr      = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
        osm->N    = size;
      }
    }
    if (!osm->iis) {
      /*
       The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
       but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
       We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
       */
      ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
    }
    if (osm->N == PETSC_DECIDE) {
      struct {PetscInt max,sum;} inwork,outwork;
      /* determine global number of subdomains and the max number of local subdomains */
      inwork.max = osm->n;
      inwork.sum = osm->n;
      ierr       = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->nmax  = outwork.max;
      osm->N     = outwork.sum;
    }

    /* Optionally print the subdomain definitions (collective). */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->sort_indices) {
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
      }
    }
    /*
     Merge the ISs, create merged vectors and restrictions.
     */
    /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
    on = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      on  += oni;
    }
    ierr = PetscMalloc1(on, &oidx);CHKERRQ(ierr);
    on   = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
      ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
      on  += oni;
    }
    /* gois takes ownership of oidx (PETSC_OWN_POINTER). */
    ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
    ierr = MatCreateVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)pc), on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &gofirst, NULL);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)pc),on,gofirst,1, &goid);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = ISDestroy(&gois);CHKERRQ(ierr);

    /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
    {
      PetscInt ini;           /* Number of indices the i-th a local inner subdomain. */
      PetscInt in;            /* Number of indices in the disjoint uniont of local inner subdomains. */
      PetscInt *iidx;         /* Global indices in the merged local inner subdomain. */
      PetscInt *ioidx;        /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      IS       giis;          /* IS for the disjoint union of inner subdomains. */
      IS       giois;         /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      /**/
      in = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        in  += ini;
      }
      ierr = PetscMalloc1(in, &iidx);CHKERRQ(ierr);
      ierr = PetscMalloc1(in, &ioidx);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(osm->gx,&gofirst, NULL);CHKERRQ(ierr);
      in   = 0;
      on   = 0;
      for (i=0; i<osm->n; i++) {
        const PetscInt         *iidxi; /* Global indices of the i-th local inner subdomain. */
        ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
        PetscInt               *ioidxi; /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
        PetscInt               ioni;  /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
        PetscInt               k;
        ierr   = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        ierr   = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
        ierr   = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
        ierr   = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
        ierr   = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
        ioidxi = ioidx+in;
        /* Map the inner indices into the local numbering of the outer subdomain; dropped indices signal an error below. */
        ierr   = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
        ierr   = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
        ierr   = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
        if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
        /* Shift into the global numbering of the merged vector: offset by the start of this rank plus preceding outer subdomains. */
        for (k = 0; k < ini; ++k) ioidxi[k] += gofirst+on;
        in += ini;
        on += oni;
      }
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
      ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
      ierr = VecDestroy(&y);CHKERRQ(ierr);
      ierr = ISDestroy(&giis);CHKERRQ(ierr);
      ierr = ISDestroy(&giois);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&goid);CHKERRQ(ierr);

    /* Create the subdomain work vectors. */
    ierr = PetscMalloc1(osm->n,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n,&osm->y);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
    /* Each subdomain work vector aliases a contiguous slice of the merged vectors' storage. */
    for (i=0, on=0; i<osm->n; ++i, on += oni) {
      PetscInt oNi;
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
    /* Create the local solvers */
    ierr = PetscMalloc1(osm->n,&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr        = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
      ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
      ierr        = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr        = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr        = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr        = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr        = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr        = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else { /*if (!pc->setupcalled)*/
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr  = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  if (size > 1) {
    ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  } else {
    ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  }
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over submatrices putting them into local ksps
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
490 
491 #undef __FUNCT__
492 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
493 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
494 {
495   PC_GASM        *osm = (PC_GASM*)pc->data;
496   PetscErrorCode ierr;
497   PetscInt       i;
498 
499   PetscFunctionBegin;
500   for (i=0; i<osm->n; i++) {
501     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
502   }
503   PetscFunctionReturn(0);
504 }
505 
506 #undef __FUNCT__
507 #define __FUNCT__ "PCApply_GASM"
508 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
509 {
510   PC_GASM        *osm = (PC_GASM*)pc->data;
511   PetscErrorCode ierr;
512   PetscInt       i;
513   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
514 
515   PetscFunctionBegin;
516   /*
517      Support for limiting the restriction or interpolation only to the inner
518      subdomain values (leaving the other values 0).
519   */
520   if (!(osm->type & PC_GASM_RESTRICT)) {
521     /* have to zero the work RHS since scatter may leave some slots empty */
522     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
523     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
524   } else {
525     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
526   }
527   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
528   if (!(osm->type & PC_GASM_RESTRICT)) {
529     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
530   } else {
531     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
532   }
533   /* do the subdomain solves */
534   for (i=0; i<osm->n; ++i) {
535     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
536   }
537   ierr = VecZeroEntries(y);CHKERRQ(ierr);
538   if (!(osm->type & PC_GASM_INTERPOLATE)) {
539     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
540     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
541   } else {
542     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
543     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
544   }
545 }
546 
547 #undef __FUNCT__
548 #define __FUNCT__ "PCApplyTranspose_GASM"
549 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
550 {
551   PC_GASM        *osm = (PC_GASM*)pc->data;
552   PetscErrorCode ierr;
553   PetscInt       i;
554   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
555 
556   PetscFunctionBegin;
557   /*
558      Support for limiting the restriction or interpolation to only local
559      subdomain values (leaving the other values 0).
560 
561      Note: these are reversed from the PCApply_GASM() because we are applying the
562      transpose of the three terms
563   */
564   if (!(osm->type & PC_GASM_INTERPOLATE)) {
565     /* have to zero the work RHS since scatter may leave some slots empty */
566     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
567     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
568   } else {
569     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
570   }
571   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
572   if (!(osm->type & PC_GASM_INTERPOLATE)) {
573     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
574   } else {
575     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
576   }
577   /* do the local solves */
578   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
579     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
580   }
581   ierr = VecZeroEntries(y);CHKERRQ(ierr);
582   if (!(osm->type & PC_GASM_RESTRICT)) {
583     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
584     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
585   } else {
586     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
587     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
588   }
589   PetscFunctionReturn(0);
590 }
591 
592 #undef __FUNCT__
593 #define __FUNCT__ "PCReset_GASM"
594 static PetscErrorCode PCReset_GASM(PC pc)
595 {
596   PC_GASM        *osm = (PC_GASM*)pc->data;
597   PetscErrorCode ierr;
598   PetscInt       i;
599 
600   PetscFunctionBegin;
601   if (osm->ksp) {
602     for (i=0; i<osm->n; i++) {
603       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
604     }
605   }
606   if (osm->pmat) {
607     if (osm->n > 0) {
608       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
609     }
610   }
611   if (osm->x) {
612     for (i=0; i<osm->n; i++) {
613       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
614       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
615     }
616   }
617   ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
618   ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);
619 
620   ierr     = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
621   ierr     = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
622   ierr     = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
623   osm->ois = 0;
624   osm->iis = 0;
625   PetscFunctionReturn(0);
626 }
627 
628 #undef __FUNCT__
629 #define __FUNCT__ "PCDestroy_GASM"
630 static PetscErrorCode PCDestroy_GASM(PC pc)
631 {
632   PC_GASM        *osm = (PC_GASM*)pc->data;
633   PetscErrorCode ierr;
634   PetscInt       i;
635 
636   PetscFunctionBegin;
637   ierr = PCReset_GASM(pc);CHKERRQ(ierr);
638   if (osm->ksp) {
639     for (i=0; i<osm->n; i++) {
640       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
641     }
642     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
643   }
644   ierr = PetscFree(osm->x);CHKERRQ(ierr);
645   ierr = PetscFree(osm->y);CHKERRQ(ierr);
646   ierr = PetscFree(pc->data);CHKERRQ(ierr);
647   PetscFunctionReturn(0);
648 }
649 
#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GASM"
/*
   PCSetFromOptions_GASM - Processes the options database for the GASM preconditioner.

   Note: any option that explicitly configures subdomains or overlap clears
   dm_subdomains, so explicit user settings take precedence over DM-defined
   decompositions.
*/
static PetscErrorCode PCSetFromOptions_GASM(PetscOptions *PetscOptionsObject,PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCGASMType     gasmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric (only when the user has
     not already chosen a type explicitly via PCGASMSetType()) */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_GASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Generalized additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_gasm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCGASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    /* Default to local autocreated subdomains; the follow-up option may override. */
    osm->create_local = PETSC_TRUE;
    ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,NULL);CHKERRQ(ierr);
    ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr);}
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
686 
687 /*------------------------------------------------------------------------------------*/
688 
689 #undef __FUNCT__
690 #define __FUNCT__ "PCGASMSetSubdomains_GASM"
691 static PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
692 {
693   PC_GASM        *osm = (PC_GASM*)pc->data;
694   PetscErrorCode ierr;
695   PetscInt       i;
696 
697   PetscFunctionBegin;
698   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
699   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
700 
701   if (!pc->setupcalled) {
702     osm->n   = n;
703     osm->ois = 0;
704     osm->iis = 0;
705     if (ois) {
706       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
707     }
708     if (iis) {
709       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
710     }
711     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
712     if (ois) {
713       ierr = PetscMalloc1(n,&osm->ois);CHKERRQ(ierr);
714       for (i=0; i<n; i++) osm->ois[i] = ois[i];
715       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
716       osm->overlap = -1;
717       if (!iis) {
718         ierr = PetscMalloc1(n,&osm->iis);CHKERRQ(ierr);
719         for (i=0; i<n; i++) {
720           ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);
721           osm->iis[i] = ois[i];
722         }
723       }
724     }
725     if (iis) {
726       ierr = PetscMalloc1(n,&osm->iis);CHKERRQ(ierr);
727       for (i=0; i<n; i++) osm->iis[i] = iis[i];
728       if (!ois) {
729         ierr = PetscMalloc1(n,&osm->ois);CHKERRQ(ierr);
730         for (i=0; i<n; i++) {
731 	  ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
732 	  osm->ois[i] = iis[i];
733         }
734         if (osm->overlap > 0) {
735           /* Extend the "overlapping" regions by a number of steps */
736           ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
737         }
738       }
739     }
740   }
741   PetscFunctionReturn(0);
742 }
743 
744 #undef __FUNCT__
745 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
746 static PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
747 {
748   PC_GASM        *osm = (PC_GASM*)pc->data;
749   PetscErrorCode ierr;
750   PetscMPIInt    rank,size;
751   PetscInt       n;
752   PetscInt       Nmin, Nmax;
753 
754   PetscFunctionBegin;
755   if (!create_local) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "No suppor for autocreation of nonlocal subdomains.");
756   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
757   ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPI_MIN,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
758   ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
759   if (Nmin != Nmax) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);
760 
761   osm->create_local = create_local;
762   /*
763      Split the subdomains equally among all processors
764   */
765   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
766   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
767   n    = N/size + ((N % size) > rank);
768   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
769   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
770   if (!pc->setupcalled) {
771     ierr      = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
772     osm->N    = N;
773     osm->n    = n;
774     osm->nmax = N/size + ((N%size) ? 1 : 0);
775     osm->ois  = 0;
776     osm->iis  = 0;
777   }
778   PetscFunctionReturn(0);
779 }
780 
781 #undef __FUNCT__
782 #define __FUNCT__ "PCGASMSetOverlap_GASM"
783 static PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
784 {
785   PC_GASM *osm = (PC_GASM*)pc->data;
786 
787   PetscFunctionBegin;
788   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
789   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
790   if (!pc->setupcalled) osm->overlap = ovl;
791   PetscFunctionReturn(0);
792 }
793 
794 #undef __FUNCT__
795 #define __FUNCT__ "PCGASMSetType_GASM"
796 static PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
797 {
798   PC_GASM *osm = (PC_GASM*)pc->data;
799 
800   PetscFunctionBegin;
801   osm->type     = type;
802   osm->type_set = PETSC_TRUE;
803   PetscFunctionReturn(0);
804 }
805 
806 #undef __FUNCT__
807 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
808 static PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
809 {
810   PC_GASM *osm = (PC_GASM*)pc->data;
811 
812   PetscFunctionBegin;
813   osm->sort_indices = doSort;
814   PetscFunctionReturn(0);
815 }
816 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP_GASM"
/*
   PCGASMGetSubKSP_GASM - Returns the local subdomain count, the global index of
   this rank's first subdomain, and/or the array of subdomain KSP contexts.

   FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
        In particular, it would upset the global subdomain number calculation.
*/
static PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* osm->n is only valid after setup has created the subdomains. */
  if (osm->n < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n) *n = osm->n;
  if (first) {
    /* Inclusive prefix sum of the local subdomain counts; subtracting this
       rank's own count converts it to the global index of the first local
       subdomain.  MPI_Scan is collective, so either all ranks request 'first'
       or none may. */
    ierr    = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first -= osm->n;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true, though!  This flag is used only for PCView_GASM() */
    *ksp                        = osm->ksp;
    osm->same_subdomain_solvers = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
} /* PCGASMGetSubKSP_GASM() */
844 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains"
/*@C
    PCGASMSetSubdomains - Sets the subdomains for this processor
    for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
.   n   - the number of subdomains for this processor
.   iis - the index sets that define this processor's local inner subdomains
         (or NULL for PETSc to determine subdomains)
-   ois - the index sets that define this processor's local outer subdomains
         (or NULL to use the same as iis)

    Notes:
    The IS indices use the parallel, global numbering of the vector entries.
    Inner subdomains are those where the correction is applied.
    Outer subdomains are those where the residual necessary to obtain the
    corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
    Both inner and outer subdomains can extend over several processors.
    This processor's portion of a subdomain is known as a local subdomain.

    By default the GASM preconditioner uses 1 (local) subdomain per processor.
    Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
    all processors that PCGASM will create automatically, and to specify whether
    they should be local or not.


    Level: advanced

.keywords: PC, GASM, set, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to PCGASMSetSubdomains_GASM() when pc is a PCGASM; no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
891 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
    Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
    communicator. Either all or no processors in the PC communicator must call this routine with
    the same N.  The subdomains will be created automatically during PCSetUp().

    Collective on PC

    Input Parameters:
+   pc           - the preconditioner context
.   N            - the total number of subdomains cumulative across all processors
-   create_local - whether the subdomains to be created are to be local

    Options Database Key:
    To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
+    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
-    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.


    Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
    of subdomains per processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to PCGASMSetTotalSubdomains_GASM() when pc is a PCGASM; no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
934 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <overlap> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
    PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  /* Dispatch to PCGASMSetOverlap_GASM() when pc is a PCGASM; no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
985 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* Dispatch to PCGASMSetType_GASM() when pc is a PCGASM; no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1024 
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  /* Dispatch to PCGASMSetSortIndices_GASM() when pc is a PCGASM; no-op otherwise. */
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1053 
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSP objects is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D(),
@*/
PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) raises an error if pc is not a PCGASM. */
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1095 
1096 /* -------------------------------------------------------------------------------------*/
1097 /*MC
1098    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1099            its own KSP object.
1100 
1101    Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets total number of local subdomains (known as blocks) to be distributed among processors
1103 .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1104 .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1105 .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1106 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1107 
1108      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1109       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1110       -pc_gasm_type basic to use the standard GASM.
1111 
1112    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1113      than one processor. Defaults to one block per processor.
1114 
1115      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1116         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1117 
1118      To set the options on the solvers separate for each block call PCGASMGetSubKSP()
1119          and set the options directly on the resulting KSP object (you can access its PC
1120          with KSPGetPC())
1121 
1122 
1123    Level: beginner
1124 
1125    Concepts: additive Schwarz method
1126 
1127     References:
1128     An additive variant of the Schwarz alternating method for the case of many subregions
1129     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1130 
1131     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1132     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1133 
1134 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1135            PCBJACOBI,  PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1136            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1137 
1138 M*/
1139 
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
/*
   PCCreate_GASM - Creates the PC_GASM context for a PCGASM preconditioner and
   installs its function table and composed methods.
*/
PETSC_EXTERN PetscErrorCode PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM        *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  /* Defaults: subdomain counts decided at PCSetUp() time, overlap of 1,
     restricted interpolation (PC_GASM_RESTRICT), sorted subdomain indices;
     all setup-time objects start out NULL. */
  osm->N                      = PETSC_DECIDE;
  osm->n                      = PETSC_DECIDE;
  osm->nmax                   = 0;
  osm->overlap                = 1;
  osm->ksp                    = 0;
  osm->gorestriction          = 0;
  osm->girestriction          = 0;
  osm->gx                     = 0;
  osm->gy                     = 0;
  osm->x                      = 0;
  osm->y                      = 0;
  osm->ois                    = 0;
  osm->iis                    = 0;
  osm->pmat                   = 0;
  osm->type                   = PC_GASM_RESTRICT;
  osm->same_subdomain_solvers = PETSC_TRUE;
  osm->sort_indices           = PETSC_TRUE;
  osm->dm_subdomains          = PETSC_FALSE;

  /* Install the PCGASM implementations of the generic PC operations. */
  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_GASM;
  pc->ops->applytranspose  = PCApplyTranspose_GASM;
  pc->ops->setup           = PCSetUp_GASM;
  pc->ops->reset           = PCReset_GASM;
  pc->ops->destroy         = PCDestroy_GASM;
  pc->ops->setfromoptions  = PCSetFromOptions_GASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
  pc->ops->view            = PCView_GASM;
  pc->ops->applyrichardson = 0;

  /* Compose the type-specific methods so the public PCGASMSetXXX()/GetXXX()
     wrappers can dispatch to them. */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSubdomains_C",PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetTotalSubdomains_C",PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetOverlap_C",PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetType_C",PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSortIndices_C",PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMGetSubKSP_C",PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1188 
1189 
1190 #undef __FUNCT__
1191 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1192 /*@C
1193    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
1194    Schwarz preconditioner for a any problem based on its matrix.
1195 
1196    Collective
1197 
1198    Input Parameters:
1199 +  A       - The global matrix operator
1200 .  overlap - amount of overlap in outer subdomains
1201 -  n       - the number of local subdomains
1202 
1203    Output Parameters:
1204 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1205 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1206 
1207    Level: advanced
1208 
1209    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1210          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1211          nonzero overlap with PCGASMSetOverlap()
1212 
1213     In the Fortran version you must provide the array outis[] already allocated of length n.
1214 
1215 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1216 
1217 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1218 @*/
1219 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS *iis[], IS *ois[])
1220 {
1221   MatPartitioning mpart;
1222   const char      *prefix;
1223   PetscErrorCode  (*f)(Mat,MatReuse,Mat*);
1224   PetscMPIInt     size;
1225   PetscInt        i,j,rstart,rend,bs;
1226   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1227   Mat             Ad     = NULL, adj;
1228   IS              ispart,isnumb,*is;
1229   PetscErrorCode  ierr;
1230 
1231   PetscFunctionBegin;
1232   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1233   PetscValidPointer(iis,4);
1234   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1235 
1236   /* Get prefix, row distribution, and block size */
1237   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1238   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1239   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1240   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1241 
1242   /* Get diagonal block from matrix if possible */
1243   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1244   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1245   if (f) {
1246     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1247   } else if (size == 1) {
1248     Ad = A;
1249   }
1250   if (Ad) {
1251     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1252     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1253   }
1254   if (Ad && n > 1) {
1255     PetscBool  match,done;
1256     /* Try to setup a good matrix partitioning if available */
1257     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1258     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1259     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1260     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1261     if (!match) {
1262       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1263     }
1264     if (!match) { /* assume a "good" partitioner is available */
1265       PetscInt       na;
1266       const PetscInt *ia,*ja;
1267       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1268       if (done) {
1269         /* Build adjacency matrix by hand. Unfortunately a call to
1270            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1271            remove the block-aij structure and we cannot expect
1272            MatPartitioning to split vertices as we need */
1273         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1274         const PetscInt *row;
1275         nnz = 0;
1276         for (i=0; i<na; i++) { /* count number of nonzeros */
1277           len = ia[i+1] - ia[i];
1278           row = ja + ia[i];
1279           for (j=0; j<len; j++) {
1280             if (row[j] == i) { /* don't count diagonal */
1281               len--; break;
1282             }
1283           }
1284           nnz += len;
1285         }
1286         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1287         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1288         nnz    = 0;
1289         iia[0] = 0;
1290         for (i=0; i<na; i++) { /* fill adjacency */
1291           cnt = 0;
1292           len = ia[i+1] - ia[i];
1293           row = ja + ia[i];
1294           for (j=0; j<len; j++) {
1295             if (row[j] != i) jja[nnz+cnt++] = row[j]; /* if not diagonal */
1296           }
1297           nnz += cnt;
1298           iia[i+1] = nnz;
1299         }
1300         /* Partitioning of the adjacency matrix */
1301         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1302         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1303         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1304         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1305         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1306         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1307         foundpart = PETSC_TRUE;
1308       }
1309       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1310     }
1311     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1312   }
1313   ierr = PetscMalloc1(n,&is);CHKERRQ(ierr);
1314   if (!foundpart) {
1315 
1316     /* Partitioning by contiguous chunks of rows */
1317 
1318     PetscInt mbs   = (rend-rstart)/bs;
1319     PetscInt start = rstart;
1320     for (i=0; i<n; i++) {
1321       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1322       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1323       start += count;
1324     }
1325 
1326   } else {
1327 
1328     /* Partitioning by adjacency of diagonal block  */
1329 
1330     const PetscInt *numbering;
1331     PetscInt       *count,nidx,*indices,*newidx,start=0;
1332     /* Get node count in each partition */
1333     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1334     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1335     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1336       for (i=0; i<n; i++) count[i] *= bs;
1337     }
1338     /* Build indices from node numbering */
1339     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1340     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1341     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1342     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1343     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1344     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1345     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1346       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1347       for (i=0; i<nidx; i++) {
1348         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1349       }
1350       ierr    = PetscFree(indices);CHKERRQ(ierr);
1351       nidx   *= bs;
1352       indices = newidx;
1353     }
1354     /* Shift to get global indices */
1355     for (i=0; i<nidx; i++) indices[i] += rstart;
1356 
1357     /* Build the index sets for each block */
1358     for (i=0; i<n; i++) {
1359       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1360       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1361       start += count[i];
1362     }
1363 
1364     ierr = PetscFree(count);CHKERRQ(ierr);
1365     ierr = PetscFree(indices);CHKERRQ(ierr);
1366     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1367     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1368   }
1369   *iis = is;
1370   if (!ois) PetscFunctionReturn(0);
1371   /*
1372    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1373    has been requested, copy the inner subdomains over so they can be modified.
1374    */
1375   ierr = PetscMalloc1(n,ois);CHKERRQ(ierr);
1376   for (i=0; i<n; ++i) {
1377     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1378       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1379       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1380     } else {
1381       ierr      = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1382       (*ois)[i] = (*iis)[i];
1383     }
1384   }
1385   if (overlap > 0) {
1386     /* Extend the "overlapping" regions by a number of steps */
1387     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1388   }
1389   PetscFunctionReturn(0);
1390 }
1391 
1392 #undef __FUNCT__
1393 #define __FUNCT__ "PCGASMDestroySubdomains"
1394 /*@C
1395    PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
1397    called after setting subdomains with PCGASMSetSubdomains().
1398 
1399    Collective
1400 
1401    Input Parameters:
1402 +  n   - the number of index sets
1403 .  iis - the array of inner subdomains,
1404 -  ois - the array of outer subdomains, can be NULL
1405 
1406    Level: intermediate
1407 
1408    Notes: this is merely a convenience subroutine that walks each list,
1409    destroys each IS on the list, and then frees the list.
1410 
1411 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1412 
1413 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1414 @*/
1415 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1416 {
1417   PetscInt       i;
1418   PetscErrorCode ierr;
1419 
1420   PetscFunctionBegin;
1421   if (n <= 0) PetscFunctionReturn(0);
1422   if (iis) {
1423     PetscValidPointer(iis,2);
1424     for (i=0; i<n; i++) {
1425       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1426     }
1427     ierr = PetscFree(iis);CHKERRQ(ierr);
1428   }
1429   if (ois) {
1430     for (i=0; i<n; i++) {
1431       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1432     }
1433     ierr = PetscFree(ois);CHKERRQ(ierr);
1434   }
1435   PetscFunctionReturn(0);
1436 }
1437 
1438 
/*
  Intersects one rectangular subdomain [xleft,xright) x [ylow,yhigh) of an M x N
  row-major grid with this process's contiguous ownership range [first,last) of
  grid points.  Outputs (through the *_loc pointers) the bounding rows and the
  row offsets of the intersection, and (through n) the number of locally owned
  points inside the subdomain.
  Wrapped in do { } while (0) so the macro expands to exactly one statement and
  remains safe inside unbraced if/else bodies; arguments are parenthesized to
  guard against operator-precedence surprises.
*/
#define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
  do {                                                                                                    \
    PetscInt first_row = (first)/(M), last_row = (last)/(M)+1;                                            \
    /* ylow_loc is the grid row containing the first element of the local subdomain */                   \
    *(ylow_loc) = PetscMax(first_row,(ylow));                                                             \
    /* xleft_loc is the offset of the first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
    *(xleft_loc) = *(ylow_loc)==first_row ? PetscMax((first)%(M),(xleft)) : (xleft);                      \
    /* yhigh_loc is the grid row above the last local subdomain element */                                \
    *(yhigh_loc) = PetscMin(last_row,(yhigh));                                                            \
    /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
    *(xright_loc) = *(yhigh_loc)==last_row ? PetscMin((xright),(last)%(M)) : (xright);                    \
    /* Size of the local subdomain: full interior rows, plus the partial last row, minus the part missing from the first row. */ \
    *(n) = 0;                                                                                             \
    if (*(ylow_loc) < *(yhigh_loc)) {                                                                     \
      PetscInt width = (xright)-(xleft);                                                                  \
      *(n) += width*(*(yhigh_loc)-*(ylow_loc)-1);                                                         \
      *(n) += PetscMin(PetscMax(*(xright_loc)-(xleft),0),width);                                          \
      *(n) -= PetscMin(PetscMax(*(xleft_loc)-(xleft),0), width);                                          \
    }                                                                                                     \
  } while (0)
1466 
1467 
1468 
1469 #undef __FUNCT__
1470 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1471 /*@
1472    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1473    preconditioner for a two-dimensional problem on a regular grid.
1474 
1475    Collective
1476 
1477    Input Parameters:
1478 +  M, N               - the global number of grid points in the x and y directions
1479 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1480 .  dof                - degrees of freedom per node
1481 -  overlap            - overlap in mesh lines
1482 
1483    Output Parameters:
1484 +  Nsub - the number of local subdomains created
1485 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1486 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1487 
1488 
1489    Level: advanced
1490 
1491 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1492 
1493 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1494           PCGASMSetOverlap()
1495 @*/
PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
{
  PetscErrorCode ierr;
  PetscMPIInt    size, rank;
  PetscInt       i, j;
  PetscInt       maxheight, maxwidth;
  PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
  PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
  PetscInt       x[2][2], y[2][2], n[2]; /* index [0] = limits WITH overlap, [1] = WITHOUT; n[q] is the local size for each */
  PetscInt       first, last;            /* this process's ownership range of pc->pmat rows */
  PetscInt       nidx, *idx;
  PetscInt       ii,jj,s,q,d;            /* s counts local subdomains; q is the pass index (0 = overlapping, 1 = non-overlapping) */
  PetscInt       k,kk;
  PetscMPIInt    color;
  MPI_Comm       comm, subcomm;
  IS             **xis = 0, **is = ois, **is_local = iis; /* xis points at the output array being filled on the current pass */

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
  /* The ownership range must fall on grid-point boundaries, i.e. be a multiple of dof. */
  if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
                                      "does not respect the number of degrees of freedom per grid point %D", first, last, dof);

  /* Determine the number of domains with nonzero intersections with the local ownership range. */
  /* First sweep over the Mdomains x Ndomains grid of subdomains: only count, so the IS arrays can be sized exactly. */
  s      = 0;
  ystart = 0;
  for (j=0; j<Ndomains; ++j) {
    maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
    /* Vertical domain limits with an overlap. */
    ylow   = PetscMax(ystart - overlap,0);
    yhigh  = PetscMin(ystart + maxheight + overlap,N);
    xstart = 0;
    for (i=0; i<Mdomains; ++i) {
      maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
      if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
      /* Horizontal domain limits with an overlap. */
      xleft  = PetscMax(xstart - overlap,0);
      xright = PetscMin(xstart + maxwidth + overlap,M);
      /*
         Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
      */
      PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
      if (nidx) ++s;
      xstart += maxwidth;
    } /* for (i = 0; i < Mdomains; ++i) */
    ystart += maxheight;
  } /* for (j = 0; j < Ndomains; ++j) */

  /* Now we can allocate the necessary number of ISs. */
  /* Second sweep: identical traversal, but this time actually build the index sets. */
  *nsub  = s;
  ierr   = PetscMalloc1(*nsub,is);CHKERRQ(ierr);
  ierr   = PetscMalloc1(*nsub,is_local);CHKERRQ(ierr);
  s      = 0;
  ystart = 0;
  for (j=0; j<Ndomains; ++j) {
    maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N);
    /* Vertical domain limits with an overlap. */
    y[0][0] = PetscMax(ystart - overlap,0);
    y[0][1] = PetscMin(ystart + maxheight + overlap,N);
    /* Vertical domain limits without an overlap. */
    y[1][0] = ystart;
    y[1][1] = PetscMin(ystart + maxheight,N);
    xstart  = 0;
    for (i=0; i<Mdomains; ++i) {
      maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
      if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
      /* Horizontal domain limits with an overlap. */
      x[0][0] = PetscMax(xstart - overlap,0);
      x[0][1] = PetscMin(xstart + maxwidth + overlap,M);
      /* Horizontal domain limits without an overlap. */
      x[1][0] = xstart;
      x[1][1] = PetscMin(xstart+maxwidth,M);
      /*
         Determine whether this domain intersects this processor's ownership range of pc->pmat.
         Do this twice: first for the domains with overlaps, and once without.
         During the first pass create the subcommunicators, and use them on the second pass as well.
      */
      for (q = 0; q < 2; ++q) {
        /*
          domain limits, (xleft, xright) and (ylow, yheigh) are adjusted
          according to whether the domain with an overlap or without is considered.
        */
        xleft = x[q][0]; xright = x[q][1];
        ylow  = y[q][0]; yhigh  = y[q][1];
        PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
        nidx *= dof; /* convert from grid points to unknowns */
        n[q]  = nidx;
        /*
         Based on the counted number of indices in the local domain *with an overlap*,
         construct a subcommunicator of all the processors supporting this domain.
         Observe that a domain with an overlap might have nontrivial local support,
         while the domain without an overlap might not.  Hence, the decision to participate
         in the subcommunicator must be based on the domain with an overlap.
         */
        if (q == 0) {
          if (nidx) color = 1;
          else color = MPI_UNDEFINED;

          /* NOTE(review): subcomm obtained from MPI_Comm_split() does not appear to be
             MPI_Comm_free()'d anywhere in this routine -- possible communicator leak;
             confirm whether ISCreateGeneral() takes ownership or only references it. */
          ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
        }
        /*
         Proceed only if the number of local indices *with an overlap* is nonzero.
         */
        if (n[0]) {
          if (q == 0) xis = is;
          if (q == 1) {
            /*
             The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
             Moreover, if the overlap is zero, the two ISs are identical.
             */
            if (overlap == 0) {
              (*is_local)[s] = (*is)[s];
              ierr           = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
              continue;
            } else {
              xis     = is_local;
              subcomm = ((PetscObject)(*is)[s])->comm;
            }
          } /* if (q == 1) */
          idx  = NULL;
          ierr = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
          if (nidx) {
            /* Enumerate the owned unknowns row by row; first and last rows may be partial. */
            k = 0;
            for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
              PetscInt x0 = (jj==ylow_loc) ? xleft_loc : xleft;
              PetscInt x1 = (jj==yhigh_loc-1) ? xright_loc : xright;
              kk = dof*(M*jj + x0);
              for (ii=x0; ii<x1; ++ii) {
                for (d = 0; d < dof; ++d) {
                  idx[k++] = kk++;
                }
              }
            }
          }
          /* PETSC_OWN_POINTER: the IS takes over idx, so it is not freed here. */
          ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
        }/* if (n[0]) */
      }/* for (q = 0; q < 2; ++q) */
      if (n[0]) ++s;
      xstart += maxwidth;
    } /* for (i = 0; i < Mdomains; ++i) */
    ystart += maxheight;
  } /* for (j = 0; j < Ndomains; ++j) */
  PetscFunctionReturn(0);
}
1644 
1645 #undef __FUNCT__
1646 #define __FUNCT__ "PCGASMGetSubdomains"
1647 /*@C
1648     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1649     for the additive Schwarz preconditioner.
1650 
1651     Not Collective
1652 
1653     Input Parameter:
1654 .   pc - the preconditioner context
1655 
1656     Output Parameters:
1657 +   n   - the number of subdomains for this processor (default value = 1)
1658 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be NULL)
1659 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be NULL)
1660 
1661 
1662     Notes:
1663     The user is responsible for destroying the ISs and freeing the returned arrays.
1664     The IS numbering is in the parallel, global numbering of the vector.
1665 
1666     Level: advanced
1667 
1668 .keywords: PC, GASM, get, subdomains, additive Schwarz
1669 
1670 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1671           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1672 @*/
1673 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1674 {
1675   PC_GASM        *osm;
1676   PetscErrorCode ierr;
1677   PetscBool      match;
1678   PetscInt       i;
1679 
1680   PetscFunctionBegin;
1681   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1682   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1683   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1684   osm = (PC_GASM*)pc->data;
1685   if (n) *n = osm->n;
1686   if (iis) {
1687     ierr = PetscMalloc1(osm->n, iis);CHKERRQ(ierr);
1688   }
1689   if (ois) {
1690     ierr = PetscMalloc1(osm->n, ois);CHKERRQ(ierr);
1691   }
1692   if (iis || ois) {
1693     for (i = 0; i < osm->n; ++i) {
1694       if (iis) (*iis)[i] = osm->iis[i];
1695       if (ois) (*ois)[i] = osm->ois[i];
1696     }
1697   }
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 #undef __FUNCT__
1702 #define __FUNCT__ "PCGASMGetSubmatrices"
1703 /*@C
1704     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1705     only) for the additive Schwarz preconditioner.
1706 
1707     Not Collective
1708 
1709     Input Parameter:
1710 .   pc - the preconditioner context
1711 
1712     Output Parameters:
1713 +   n   - the number of matrices for this processor (default value = 1)
1714 -   mat - the matrices
1715 
1716     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1717            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1718            subdomains were defined using PCGASMSetTotalSubdomains().
1719     Level: advanced
1720 
1721 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1722 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1724           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1725 @*/
1726 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1727 {
1728   PC_GASM        *osm;
1729   PetscErrorCode ierr;
1730   PetscBool      match;
1731 
1732   PetscFunctionBegin;
1733   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1734   PetscValidIntPointer(n,2);
1735   if (mat) PetscValidPointer(mat,3);
1736   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1737   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1738   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1739   osm = (PC_GASM*)pc->data;
1740   if (n) *n = osm->n;
1741   if (mat) *mat = osm->pmat;
1742   PetscFunctionReturn(0);
1743 }
1744 
1745 #undef __FUNCT__
1746 #define __FUNCT__ "PCGASMSetDMSubdomains"
1747 /*@
1748     PCGASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1749     Logically Collective
1750 
1751     Input Parameter:
1752 +   pc  - the preconditioner
1753 -   flg - boolean indicating whether to use subdomains defined by the DM
1754 
1755     Options Database Key:
1756 .   -pc_gasm_dm_subdomains
1757 
1758     Level: intermediate
1759 
1760     Notes:
1761     PCGASMSetTotalSubdomains() and PCGASMSetOverlap() take precedence over PCGASMSetDMSubdomains(),
1762     so setting either of the first two effectively turns the latter off.
1763 
1764 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1765 
1766 .seealso: PCGASMGetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1767           PCGASMCreateSubdomains2D()
1768 @*/
1769 PetscErrorCode  PCGASMSetDMSubdomains(PC pc,PetscBool flg)
1770 {
1771   PC_GASM        *osm = (PC_GASM*)pc->data;
1772   PetscErrorCode ierr;
1773   PetscBool      match;
1774 
1775   PetscFunctionBegin;
1776   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1777   PetscValidLogicalCollectiveBool(pc,flg,2);
1778   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1779   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1780   if (match) {
1781     osm->dm_subdomains = flg;
1782   }
1783   PetscFunctionReturn(0);
1784 }
1785 
1786 #undef __FUNCT__
1787 #define __FUNCT__ "PCGASMGetDMSubdomains"
1788 /*@
1789     PCGASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1790     Not Collective
1791 
1792     Input Parameter:
1793 .   pc  - the preconditioner
1794 
1795     Output Parameter:
1796 .   flg - boolean indicating whether to use subdomains defined by the DM
1797 
1798     Level: intermediate
1799 
1800 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1801 
1802 .seealso: PCGASMSetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1803           PCGASMCreateSubdomains2D()
1804 @*/
1805 PetscErrorCode  PCGASMGetDMSubdomains(PC pc,PetscBool* flg)
1806 {
1807   PC_GASM        *osm = (PC_GASM*)pc->data;
1808   PetscErrorCode ierr;
1809   PetscBool      match;
1810 
1811   PetscFunctionBegin;
1812   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1813   PetscValidPointer(flg,2);
1814   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1815   if (match) {
1816     if (flg) *flg = osm->dm_subdomains;
1817   }
1818   PetscFunctionReturn(0);
1819 }
1820