/*
  This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
  In this version each processor may intersect multiple subdomains and any subdomain may
  intersect multiple processors.  Intersections of subdomains with processors are called *local
  subdomains*.

       N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
       n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
       nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
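
       For example (an illustrative scenario, not taken from any particular run): with N = 3
       local subdomains split across 2 processors so that one processor supports 2 of them and
       the other supports 1, n is 2 on the first processor and 1 on the second, and nmax is 2.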
*/
#include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscdm.h>

typedef struct {
  PetscInt   N,n,nmax;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
  VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
  IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
  IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType type;                /* use reduced interpolation, restriction or both */
  PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
  PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are the same */
  PetscBool  sort_indices;           /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;          /* whether DM is allowed to define subdomains */
} PC_GASM;

#undef __FUNCT__
#define __FUNCT__ "PCGASMSubdomainView_Private"
static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscInt       j,nidx;
  const PetscInt *idx;
  PetscViewer    sviewer;
  char           *cidx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (i < 0 || i >= osm->n) SETERRQ2(PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must be nonnegative and less than %D", i, osm->n);
  /* Inner subdomains. */
  ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
  /*
   No more than 15 characters per index plus a space.
   PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
   in case nidx == 0. That will take care of the space for the trailing '\0' as well.
   For nidx == 0, the whole string consists of '\0' characters.
   */
  ierr = PetscMalloc1(16*(nidx+1)+1, &cidx);CHKERRQ(ierr);
  ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
  ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
  for (j = 0; j < nidx; ++j) {
    ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscFree(cidx);CHKERRQ(ierr);
  /* Outer subdomains. */
  ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
  /*
   No more than 15 characters per index plus a space.
   PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
   in case nidx == 0. That will take care of the space for the trailing '\0' as well.
   For nidx == 0, the whole string consists of '\0' characters.
   */
  ierr = PetscMalloc1(16*(nidx+1)+1, &cidx);CHKERRQ(ierr);
  ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
  ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
  for (j = 0; j < nidx; ++j) {
    ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscFree(cidx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMPrintSubdomains"
static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscInt       i, l, d, count, gcount, *permutation, *numbering;
  PetscBool      found;
  PetscViewer    viewer, sviewer = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc2(osm->n, &permutation, osm->n, &numbering);CHKERRQ(ierr);
  for (i = 0; i < osm->n; ++i) permutation[i] = i;
  ierr = PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
  if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  /*
   Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
   the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
  */
  ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
  l    = 0;
  for (count = 0; count < gcount; ++count) {
    /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
    if (l<osm->n) {
      d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
      if (numbering[d] == count) {
        ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ++l;
      }
    }
    ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCView_GASM"
static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  const char     *prefix;
  PetscErrorCode ierr;
  PetscMPIInt    rank, size;
  PetscInt       i,bsz;
  PetscBool      iascii,view_subdomains=PETSC_FALSE;
  PetscViewer    sviewer;
  PetscInt       count, l, gcount, *numbering, *permutation;
  char           overlap[256]     = "user-defined overlap";
  char           gsubdomains[256] = "unknown total number of subdomains";
  char           lsubdomains[256] = "unknown number of local subdomains";
  char           msubdomains[256] = "unknown max number of local subdomains";

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
  ierr = PetscMalloc2(osm->n, &permutation, osm->n, &numbering);CHKERRQ(ierr);
  for (i = 0; i < osm->n; ++i) permutation[i] = i;
  ierr = PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);

  if (osm->overlap >= 0) {
    ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
  }
  ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
  if (osm->N > 0) {
    ierr = PetscSNPrintf(lsubdomains, sizeof(lsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
  }
  if (osm->nmax > 0) {
    ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
  }

  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,NULL);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /*
     Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
     the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
     collectively on the comm.
     */
    ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",rank,size,osm->n);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
    ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
    /* Make sure that everybody waits for the banner to be printed. */
    ierr = MPI_Barrier(PetscObjectComm((PetscObject)viewer));CHKERRQ(ierr);
    /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
    l = 0;
    for (count = 0; count < gcount; ++count) {
      PetscMPIInt srank, ssize;
      if (l<osm->n) {
        PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
        if (numbering[d] == count) {
          ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
          ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
          ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
          ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d:%d] (subcomm [%d:%d]) local subdomain number %D, local size = %D\n",rank,size,srank,ssize,d,bsz);CHKERRQ(ierr);
          ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
          if (view_subdomains) {
            ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
          }
          if (!pc->setupcalled) {
            ierr = PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
          } else {
            ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
          }
          ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
          ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
          ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
          ++l;
        }
      }
      ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i;
  PetscMPIInt    rank, size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            x,y;
  PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
  const PetscInt *oidxi;    /* Indices of the i-th local outer subdomain.                         */
  PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
  PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
  IS             gois;      /* Disjoint union of the global indices of the outer subdomains.      */
  IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
  PetscScalar    *gxarray, *gyarray;
  PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
  DM             *subdomain_dm = NULL;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_GASM_BASIC;
    }

    /*
     If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
     The total number of subdomains, osm->N, is not necessarily set: it might be PETSC_DECIDE, and then it will have to be calculated from osm->n.
     */
    if (osm->n == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_subdomains, d;
        char      **subdomain_names;
        IS        *inner_subdomain_is, *outer_subdomain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
        if (num_subdomains) {
          ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
        }
        for (d = 0; d < num_subdomains; ++d) {
          if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
          if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
          if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
          if (subdomain_dm)       {ierr = DMDestroy(&subdomain_dm[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
        ierr = PetscFree(subdomain_dm);CHKERRQ(ierr);
      }
      if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
        osm->nmax = osm->n = 1;
        ierr      = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
        osm->N    = size;
      }
    }
    if (!osm->iis) {
      /*
       The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
       but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
       We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
       */
      ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
    }
    if (osm->N == PETSC_DECIDE) {
      struct {PetscInt max,sum;} inwork,outwork;
      /* determine global number of subdomains and the max number of local subdomains */
      inwork.max = osm->n;
      inwork.sum = osm->n;
      ierr       = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->nmax  = outwork.max;
      osm->N     = outwork.sum;
    }

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->sort_indices) {
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
      }
    }
    /*
     Merge the ISs, create merged vectors and restrictions.
     */
    /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
    on = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      on  += oni;
    }
    ierr = PetscMalloc1(on, &oidx);CHKERRQ(ierr);
    on   = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
      ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
      on  += oni;
    }
    ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
    ierr = MatCreateVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)pc), on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &gofirst, NULL);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)pc),on,gofirst,1, &goid);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = ISDestroy(&gois);CHKERRQ(ierr);

    /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
    {
      PetscInt ini;           /* Number of indices in the i-th local inner subdomain. */
      PetscInt in;            /* Number of indices in the disjoint union of local inner subdomains. */
      PetscInt *iidx;         /* Global indices in the merged local inner subdomain. */
      PetscInt *ioidx;        /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      IS       giis;          /* IS for the disjoint union of inner subdomains. */
      IS       giois;         /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */

      in = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        in  += ini;
      }
      ierr = PetscMalloc1(in, &iidx);CHKERRQ(ierr);
      ierr = PetscMalloc1(in, &ioidx);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(osm->gx,&gofirst, NULL);CHKERRQ(ierr);
      in   = 0;
      on   = 0;
      for (i=0; i<osm->n; i++) {
        const PetscInt         *iidxi; /* Global indices of the i-th local inner subdomain. */
        ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
        PetscInt               *ioidxi; /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
        PetscInt               ioni;  /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
        PetscInt               k;
        ierr   = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        ierr   = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
        ierr   = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
        ierr   = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
        ierr   = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
        ioidxi = ioidx+in;
        ierr   = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
        ierr   = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
        ierr   = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
        if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
        for (k = 0; k < ini; ++k) ioidxi[k] += gofirst+on;
        in += ini;
        on += oni;
      }
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
      ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
      ierr = VecDestroy(&y);CHKERRQ(ierr);
      ierr = ISDestroy(&giis);CHKERRQ(ierr);
      ierr = ISDestroy(&giois);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&goid);CHKERRQ(ierr);

    /* Create the subdomain work vectors. */
    ierr = PetscMalloc1(osm->n,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n,&osm->y);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
    for (i=0, on=0; i<osm->n; ++i, on += oni) {
      PetscInt oNi;
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
    /* Create the local solvers */
    ierr = PetscMalloc1(osm->n,&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr        = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
      ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
      ierr        = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr        = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr        = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr        = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr        = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr        = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else { /* if (!pc->setupcalled) */
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr  = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  if (size > 1) {
    ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  } else {
    ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  }
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over submatrices putting them into local ksps
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_GASM"
static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_GASM"
static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation only to the inner
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_GASM_RESTRICT)) {
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_RESTRICT)) {
    ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* do the subdomain solves */
  for (i=0; i<osm->n; ++i) {
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_GASM"
static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation only to the inner
     subdomain values (leaving the other values 0).

     Note: these are reversed from those in PCApply_GASM() because we are applying the
     transpose of the three terms.
  */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_RESTRICT)) {
    ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_GASM"
static PetscErrorCode PCReset_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n > 0) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->x) {
    for (i=0; i<osm->n; i++) {
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);

  ierr     = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
  ierr     = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
  ierr     = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
  osm->ois = 0;
  osm->iis = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GASM"
static PetscErrorCode PCDestroy_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_GASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm->x);CHKERRQ(ierr);
  ierr = PetscFree(osm->y);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GASM"
static PetscErrorCode PCSetFromOptions_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCGASMType     gasmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_GASM_BASIC;
  }
  ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_gasm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCGASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    osm->create_local = PETSC_TRUE;
    ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,NULL);CHKERRQ(ierr);
    ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr);}
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains_GASM"
static PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
  if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    osm->n   = n;
    osm->ois = 0;
    osm->iis = 0;
    if (ois) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
    }
    if (iis) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
    }
    ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
    if (ois) {
      ierr = PetscMalloc1(n,&osm->ois);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->ois[i] = ois[i];
      /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
      osm->overlap = -1;
      if (!iis) {
        /* No inner subdomains were given: alias the outer subdomains as the inner ones, taking an extra reference for each. */
        ierr = PetscMalloc1(n,&osm->iis);CHKERRQ(ierr);
        for (i=0; i<n; i++) {
          ierr        = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);
          osm->iis[i] = ois[i];
        }
      }
    }
    if (iis) {
      ierr = PetscMalloc1(n,&osm->iis);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->iis[i] = iis[i];
      if (!ois) {
        /* No outer subdomains were given: start from the inner subdomains, taking an extra reference for each. */
        ierr = PetscMalloc1(n,&osm->ois);CHKERRQ(ierr);
        for (i=0; i<n; i++) {
          ierr        = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
          osm->ois[i] = iis[i];
        }
        if (osm->overlap > 0) {
          /* Extend the "overlapping" regions by a number of steps */
          ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
static PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;
  PetscInt       Nmin, Nmax;

  PetscFunctionBegin;
  if (!create_local) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains.");
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
  ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
  ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
  if (Nmin != Nmax) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);

  osm->create_local = create_local;
  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr      = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
    osm->N    = N;
    osm->n    = n;
    osm->nmax = N/size + ((N%size) ? 1 : 0);
    osm->ois  = 0;
    osm->iis  = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap_GASM"
static PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType_GASM"
static PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices_GASM"
static PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP_GASM"
/*
   FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
        In particular, it would upset the global subdomain number calculation.
*/
static PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n) *n = osm->n;
  if (first) {
    ierr    = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first -= osm->n;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true, though!  This flag is used only for PCView_GASM() */
    *ksp                        = osm->ksp;
    osm->same_subdomain_solvers = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
} /* PCGASMGetSubKSP_GASM() */

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains"
/*@C
    PCGASMSetSubdomains - Sets the subdomains for this processor
    for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
.   n   - the number of subdomains for this processor
.   iis - the index sets that define this processor's local inner subdomains
         (or NULL for PETSc to determine subdomains)
-   ois - the index sets that define this processor's local outer subdomains
         (or NULL to use the same as iis)

    Notes:
    The IS indices use the parallel, global numbering of the vector entries.
    Inner subdomains are those where the correction is applied.
    Outer subdomains are those where the residual necessary to obtain the
    corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
    Both inner and outer subdomains can extend over several processors.
    This processor's portion of a subdomain is known as a local subdomain.

    By default the GASM preconditioner uses 1 (local) subdomain per processor.
    Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
    all processors that PCGASM will create automatically, and to specify whether
    they should be local or not.

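    A minimal usage sketch (assuming pc is a PCGASM preconditioner and A is its operator Mat;
    the one-inner-subdomain-per-processor split is illustrative, not prescribed by PCGASM):
.vb
      IS       iis;
      PetscInt rstart,rend;
      ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&iis);CHKERRQ(ierr);
      ierr = PCGASMSetSubdomains(pc,1,&iis,NULL);CHKERRQ(ierr);  /* outer subdomains default to the inner ones */
      ierr = ISDestroy(&iis);CHKERRQ(ierr);                      /* the PC keeps its own reference */
.ve
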
    Level: advanced

.keywords: PC, GASM, set, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
    Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
    communicator. Either all or no processors in the PC communicator must call this routine with
    the same N.  The subdomains will be created automatically during PCSetUp().

    Collective on PC

    Input Parameters:
+   pc           - the preconditioner context
.   N            - the total number of subdomains cumulative across all processors
-   create_local - whether the subdomains to be created are to be local

    Options Database Keys:
    To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
+    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
-    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.

    Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
    of subdomains per processor.

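    A minimal usage sketch (the total of 4 subdomains is illustrative):
.vb
      ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
      ierr = PCGASMSetTotalSubdomains(pc,4,PETSC_TRUE);CHKERRQ(ierr);
.ve
    or, equivalently, the options -pc_type gasm -pc_gasm_total_subdomains 4.
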
    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <overlap> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
    PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.

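    For example, the following sketch recovers a block Jacobi-like method
    (one automatically created subdomain per processor, no overlap):
.vb
      ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
      ierr = PCGASMSetOverlap(pc,0);CHKERRQ(ierr);   /* or: -pc_gasm_overlap 0 */
.ve
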
    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

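    A minimal usage sketch (assuming pc is a PCGASM preconditioner; note that the setup code
    in this file selects PC_GASM_BASIC automatically for matrices known to be symmetric):
.vb
      ierr = PCGASMSetType(pc,PC_GASM_BASIC);CHKERRQ(ierr);   /* or: -pc_gasm_type basic */
.ve
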
    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

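    For example, a sketch that keeps user-provided index orderings (PCSetUp_GASM() otherwise
    sorts the subdomain index sets):
.vb
      ierr = PCGASMSetSortIndices(pc,PETSC_FALSE);CHKERRQ(ierr);
.ve
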
    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCGASMGetSubKSP() the array of KSP contexts is not to be freed

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

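   A usage sketch (assuming ksp is the outer solver whose PC is a PCGASM; the choice of
   PCLU for the block solvers is illustrative):
.vb
      KSP      *subksp;
      PC       subpc;
      PetscInt i,nlocal;
      ierr = KSPSetUp(ksp);CHKERRQ(ierr);
      ierr = PCGASMGetSubKSP(pc,&nlocal,NULL,&subksp);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) {
        ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc,PCLU);CHKERRQ(ierr);
      }
.ve
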
   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets total number of local subdomains (known as blocks) to be distributed among processors
.  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
.  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
.  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
-  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
      -pc_gasm_type basic to use the standard GASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
        (a fuller command line is sketched below).

     To set the options on the solvers separately for each block call PCGASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

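     A command-line usage sketch (the executable name ./app and the solver choices are
     illustrative):
.vb
      mpiexec -n 4 ./app -ksp_type gmres -pc_type gasm -pc_gasm_overlap 2 \
              -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve
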
   Level: beginner

   Concepts: additive Schwarz method

    References:
    An additive variant of the Schwarz alternating method for the case of many subregions
    M Dryja, OB Widlund - Courant Institute, New York University Technical report

    Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI,  PCGASMGetSubKSP(), PCGASMSetSubdomains(),
           PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()

M*/

#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
PETSC_EXTERN PetscErrorCode PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM        *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  osm->N                      = PETSC_DECIDE;
  osm->n                      = PETSC_DECIDE;
  osm->nmax                   = 0;
  osm->overlap                = 1;
  osm->ksp                    = 0;
  osm->gorestriction          = 0;
  osm->girestriction          = 0;
  osm->gx                     = 0;
  osm->gy                     = 0;
  osm->x                      = 0;
  osm->y                      = 0;
  osm->ois                    = 0;
  osm->iis                    = 0;
  osm->pmat                   = 0;
  osm->type                   = PC_GASM_RESTRICT;
  osm->same_subdomain_solvers = PETSC_TRUE;
  osm->sort_indices           = PETSC_TRUE;
  osm->dm_subdomains          = PETSC_FALSE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_GASM;
  pc->ops->applytranspose  = PCApplyTranspose_GASM;
  pc->ops->setup           = PCSetUp_GASM;
  pc->ops->reset           = PCReset_GASM;
  pc->ops->destroy         = PCDestroy_GASM;
  pc->ops->setfromoptions  = PCSetFromOptions_GASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
  pc->ops->view            = PCView_GASM;
  pc->ops->applyrichardson = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSubdomains_C",PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetTotalSubdomains_C",PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetOverlap_C",PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetType_C",PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSortIndices_C",PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMGetSubKSP_C",PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

1192 #undef __FUNCT__
1193 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1194 /*@C
1195    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
   Schwarz preconditioner for any problem based on its matrix.
1197 
1198    Collective
1199 
1200    Input Parameters:
1201 +  A       - The global matrix operator
1202 .  overlap - amount of overlap in outer subdomains
1203 -  n       - the number of local subdomains
1204 
1205    Output Parameters:
1206 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1207 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1208 
1209    Level: advanced
1210 
   Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF; if ois is requested,
         the outer subdomains are obtained by expanding the inner ones by the given overlap using
         MatIncreaseOverlap(). Alternatively, pass the inner subdomains to PCGASMSetSubdomains() and let
         PCGASM generate the overlap itself by setting a nonzero overlap with PCGASMSetOverlap().

    In the Fortran version you must provide the output arrays, already allocated of length n.
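
    A minimal usage sketch (error checking omitted; assumes A is the matrix passed to the PC and
    pc is a PCGASM preconditioner):
.vb
    IS *iis, *ois;
    PCGASMCreateLocalSubdomains(A, 2, 4, &iis, &ois);  /* 4 local subdomains with overlap 2 */
    PCGASMSetSubdomains(pc, 4, iis, ois);
    PCGASMDestroySubdomains(4, iis, ois);              /* safe once the subdomains have been set */
.ve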
1216 
1217 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1218 
1219 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1220 @*/
1221 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS *iis[], IS *ois[])
1222 {
1223   MatPartitioning mpart;
1224   const char      *prefix;
1225   PetscErrorCode  (*f)(Mat,MatReuse,Mat*);
1226   PetscMPIInt     size;
1227   PetscInt        i,j,rstart,rend,bs;
1228   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1229   Mat             Ad     = NULL, adj;
1230   IS              ispart,isnumb,*is;
1231   PetscErrorCode  ierr;
1232 
1233   PetscFunctionBegin;
1234   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1235   PetscValidPointer(iis,4);
1236   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1237 
1238   /* Get prefix, row distribution, and block size */
1239   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1240   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1241   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1242   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1243 
1244   /* Get diagonal block from matrix if possible */
1245   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1246   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1247   if (f) {
1248     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1249   } else if (size == 1) {
1250     Ad = A;
1251   }
1252   if (Ad) {
1253     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1254     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1255   }
1256   if (Ad && n > 1) {
1257     PetscBool  match,done;
1258     /* Try to setup a good matrix partitioning if available */
1259     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1260     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1261     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1262     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1263     if (!match) {
1264       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1265     }
1266     if (!match) { /* assume a "good" partitioner is available */
1267       PetscInt       na;
1268       const PetscInt *ia,*ja;
1269       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1270       if (done) {
1271         /* Build adjacency matrix by hand. Unfortunately a call to
1272            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1273            remove the block-aij structure and we cannot expect
1274            MatPartitioning to split vertices as we need */
1275         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1276         const PetscInt *row;
1277         nnz = 0;
1278         for (i=0; i<na; i++) { /* count number of nonzeros */
1279           len = ia[i+1] - ia[i];
1280           row = ja + ia[i];
1281           for (j=0; j<len; j++) {
1282             if (row[j] == i) { /* don't count diagonal */
1283               len--; break;
1284             }
1285           }
1286           nnz += len;
1287         }
1288         ierr   = PetscMalloc1((na+1),&iia);CHKERRQ(ierr);
1289         ierr   = PetscMalloc1((nnz),&jja);CHKERRQ(ierr);
1290         nnz    = 0;
1291         iia[0] = 0;
1292         for (i=0; i<na; i++) { /* fill adjacency */
1293           cnt = 0;
1294           len = ia[i+1] - ia[i];
1295           row = ja + ia[i];
1296           for (j=0; j<len; j++) {
1297             if (row[j] != i) jja[nnz+cnt++] = row[j]; /* if not diagonal */
1298           }
1299           nnz += cnt;
1300           iia[i+1] = nnz;
1301         }
1302         /* Partitioning of the adjacency matrix */
1303         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1304         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1305         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1306         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1307         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1308         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1309         foundpart = PETSC_TRUE;
1310       }
1311       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1312     }
1313     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1314   }
1315   ierr = PetscMalloc1(n,&is);CHKERRQ(ierr);
1316   if (!foundpart) {
1317 
1318     /* Partitioning by contiguous chunks of rows */
1319 
1320     PetscInt mbs   = (rend-rstart)/bs;
1321     PetscInt start = rstart;
1322     for (i=0; i<n; i++) {
1323       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1324       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1325       start += count;
1326     }
1327 
1328   } else {
1329 
1330     /* Partitioning by adjacency of diagonal block  */
1331 
1332     const PetscInt *numbering;
1333     PetscInt       *count,nidx,*indices,*newidx,start=0;
1334     /* Get node count in each partition */
1335     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1336     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1337     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1338       for (i=0; i<n; i++) count[i] *= bs;
1339     }
1340     /* Build indices from node numbering */
1341     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1342     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1343     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1344     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1345     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1346     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1347     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1348       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1349       for (i=0; i<nidx; i++) {
1350         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1351       }
1352       ierr    = PetscFree(indices);CHKERRQ(ierr);
1353       nidx   *= bs;
1354       indices = newidx;
1355     }
1356     /* Shift to get global indices */
1357     for (i=0; i<nidx; i++) indices[i] += rstart;
1358 
1359     /* Build the index sets for each block */
1360     for (i=0; i<n; i++) {
1361       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1362       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1363       start += count[i];
1364     }
1365 
    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
1368     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1369     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1370   }
1371   *iis = is;
1372   if (!ois) PetscFunctionReturn(0);
1373   /*
1374    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1375    has been requested, copy the inner subdomains over so they can be modified.
1376    */
1377   ierr = PetscMalloc1(n,ois);CHKERRQ(ierr);
1378   for (i=0; i<n; ++i) {
1379     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1380       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1381       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1382     } else {
1383       ierr      = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1384       (*ois)[i] = (*iis)[i];
1385     }
1386   }
1387   if (overlap > 0) {
1388     /* Extend the "overlapping" regions by a number of steps */
1389     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1390   }
1391   PetscFunctionReturn(0);
1392 }
1393 
1394 #undef __FUNCT__
1395 #define __FUNCT__ "PCGASMDestroySubdomains"
1396 /*@C
1397    PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
   called only after the subdomains have been set with PCGASMSetSubdomains().
1400 
1401    Collective
1402 
1403    Input Parameters:
1404 +  n   - the number of index sets
.  iis - the array of inner subdomains
1406 -  ois - the array of outer subdomains, can be NULL
1407 
1408    Level: intermediate
1409 
1410    Notes: this is merely a convenience subroutine that walks each list,
1411    destroys each IS on the list, and then frees the list.
1412 
1413 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1414 
1415 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1416 @*/
1417 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1418 {
1419   PetscInt       i;
1420   PetscErrorCode ierr;
1421 
1422   PetscFunctionBegin;
1423   if (n <= 0) PetscFunctionReturn(0);
1424   if (iis) {
1425     PetscValidPointer(iis,2);
1426     for (i=0; i<n; i++) {
1427       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1428     }
1429     ierr = PetscFree(iis);CHKERRQ(ierr);
1430   }
1431   if (ois) {
1432     for (i=0; i<n; i++) {
1433       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1434     }
1435     ierr = PetscFree(ois);CHKERRQ(ierr);
1436   }
1437   PetscFunctionReturn(0);
1438 }
1439 
1440 
1441 #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
1442   {                                                                                                       \
1443     PetscInt first_row = first/M, last_row = last/M+1;                                                     \
1444     /*                                                                                                    \
1445      Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
1446      of the bounding box of the intersection of the subdomain with the local ownership range (local       \
1447      subdomain).                                                                                          \
1448      Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
1449      of the intersection.                                                                                 \
    */                                                                                                    \
    /* ylow_loc is the grid row containing the first element of the local subdomain */                   \
    *ylow_loc = PetscMax(first_row,ylow);                                                                 \
    /* xleft_loc is the offset of the first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
    *xleft_loc = *ylow_loc==first_row ? PetscMax(first%M,xleft) : xleft;                                   \
    /* yhigh_loc is the grid row above the last local subdomain element */                                \
    *yhigh_loc = PetscMin(last_row,yhigh);                                                                 \
    /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */           \
1458     *xright_loc = *yhigh_loc==last_row ? PetscMin(xright,last%M) : xright;                                                                          \
1459     /* Now compute the size of the local subdomain n. */ \
1460     *n = 0;                                               \
1461     if (*ylow_loc < *yhigh_loc) {                           \
1462       PetscInt width = xright-xleft;                     \
1463       *n += width*(*yhigh_loc-*ylow_loc-1);                 \
1464       *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
1465       *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
1466     } \
1467   }
1468 
1469 
1470 
1471 #undef __FUNCT__
1472 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1473 /*@
1474    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1475    preconditioner for a two-dimensional problem on a regular grid.
1476 
1477    Collective
1478 
1479    Input Parameters:
1480 +  M, N               - the global number of grid points in the x and y directions
1481 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1482 .  dof                - degrees of freedom per node
1483 -  overlap            - overlap in mesh lines
1484 
1485    Output Parameters:
1486 +  Nsub - the number of local subdomains created
1487 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1488 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1489 
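   A minimal usage sketch (error checking omitted; assumes pc is a PCGASM preconditioner whose
   matrix discretizes a 64 x 64 grid with one degree of freedom per node):
.vb
   PetscInt nsub;
   IS       *iis, *ois;
   PCGASMCreateSubdomains2D(pc, 64, 64, 4, 4, 1, 1, &nsub, &iis, &ois);  /* 4 x 4 subdomains, overlap 1 */
   PCGASMSetSubdomains(pc, nsub, iis, ois);
   PCGASMDestroySubdomains(nsub, iis, ois);
.ve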
1490 
1491    Level: advanced
1492 
1493 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1494 
1495 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1496           PCGASMSetOverlap()
1497 @*/
1498 PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
1499 {
1500   PetscErrorCode ierr;
1501   PetscMPIInt    size, rank;
1502   PetscInt       i, j;
1503   PetscInt       maxheight, maxwidth;
1504   PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
1505   PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
1506   PetscInt       x[2][2], y[2][2], n[2];
1507   PetscInt       first, last;
1508   PetscInt       nidx, *idx;
1509   PetscInt       ii,jj,s,q,d;
1510   PetscInt       k,kk;
1511   PetscMPIInt    color;
1512   MPI_Comm       comm, subcomm;
1513   IS             **xis = 0, **is = ois, **is_local = iis;
1514 
1515   PetscFunctionBegin;
1516   ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
1517   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1518   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1519   ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
1520   if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1521                                       "does not respect the number of degrees of freedom per grid point %D", first, last, dof);
1522 
1523   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1524   s      = 0;
1525   ystart = 0;
1526   for (j=0; j<Ndomains; ++j) {
1527     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1529     /* Vertical domain limits with an overlap. */
1530     ylow   = PetscMax(ystart - overlap,0);
1531     yhigh  = PetscMin(ystart + maxheight + overlap,N);
1532     xstart = 0;
1533     for (i=0; i<Mdomains; ++i) {
1534       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1535       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1536       /* Horizontal domain limits with an overlap. */
1537       xleft  = PetscMax(xstart - overlap,0);
1538       xright = PetscMin(xstart + maxwidth + overlap,M);
1539       /*
1540          Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1541       */
1542       PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1543       if (nidx) ++s;
1544       xstart += maxwidth;
1545     } /* for (i = 0; i < Mdomains; ++i) */
1546     ystart += maxheight;
1547   } /* for (j = 0; j < Ndomains; ++j) */
1548 
1549   /* Now we can allocate the necessary number of ISs. */
1550   *nsub  = s;
1551   ierr   = PetscMalloc1((*nsub),is);CHKERRQ(ierr);
1552   ierr   = PetscMalloc1((*nsub),is_local);CHKERRQ(ierr);
1553   s      = 0;
1554   ystart = 0;
1555   for (j=0; j<Ndomains; ++j) {
1556     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1558     /* Vertical domain limits with an overlap. */
1559     y[0][0] = PetscMax(ystart - overlap,0);
1560     y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1561     /* Vertical domain limits without an overlap. */
1562     y[1][0] = ystart;
1563     y[1][1] = PetscMin(ystart + maxheight,N);
1564     xstart  = 0;
1565     for (i=0; i<Mdomains; ++i) {
1566       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1567       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1568       /* Horizontal domain limits with an overlap. */
1569       x[0][0] = PetscMax(xstart - overlap,0);
1570       x[0][1] = PetscMin(xstart + maxwidth + overlap,M);
1571       /* Horizontal domain limits without an overlap. */
1572       x[1][0] = xstart;
1573       x[1][1] = PetscMin(xstart+maxwidth,M);
1574       /*
1575          Determine whether this domain intersects this processor's ownership range of pc->pmat.
1576          Do this twice: first for the domains with overlaps, and once without.
1577          During the first pass create the subcommunicators, and use them on the second pass as well.
1578       */
1579       for (q = 0; q < 2; ++q) {
1580         /*
          The domain limits (xleft, xright) and (ylow, yhigh) are adjusted
          according to whether the domain with or without an overlap is considered.
1583         */
1584         xleft = x[q][0]; xright = x[q][1];
1585         ylow  = y[q][0]; yhigh  = y[q][1];
1586         PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1587         nidx *= dof;
1588         n[q]  = nidx;
1589         /*
1590          Based on the counted number of indices in the local domain *with an overlap*,
1591          construct a subcommunicator of all the processors supporting this domain.
1592          Observe that a domain with an overlap might have nontrivial local support,
1593          while the domain without an overlap might not.  Hence, the decision to participate
1594          in the subcommunicator must be based on the domain with an overlap.
1595          */
1596         if (q == 0) {
1597           if (nidx) color = 1;
1598           else color = MPI_UNDEFINED;
1599 
1600           ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
1601         }
1602         /*
1603          Proceed only if the number of local indices *with an overlap* is nonzero.
1604          */
1605         if (n[0]) {
1606           if (q == 0) xis = is;
1607           if (q == 1) {
1608             /*
1609              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1610              Moreover, if the overlap is zero, the two ISs are identical.
1611              */
1612             if (overlap == 0) {
1613               (*is_local)[s] = (*is)[s];
1614               ierr           = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
1615               continue;
1616             } else {
1617               xis     = is_local;
1618               subcomm = ((PetscObject)(*is)[s])->comm;
1619             }
1620           } /* if (q == 1) */
1621           idx  = NULL;
1622           ierr = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1623           if (nidx) {
1624             k = 0;
1625             for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1626               PetscInt x0 = (jj==ylow_loc) ? xleft_loc : xleft;
1627               PetscInt x1 = (jj==yhigh_loc-1) ? xright_loc : xright;
1628               kk = dof*(M*jj + x0);
1629               for (ii=x0; ii<x1; ++ii) {
1630                 for (d = 0; d < dof; ++d) {
1631                   idx[k++] = kk++;
1632                 }
1633               }
1634             }
1635           }
1636           ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
1637         }/* if (n[0]) */
1638       }/* for (q = 0; q < 2; ++q) */
1639       if (n[0]) ++s;
1640       xstart += maxwidth;
1641     } /* for (i = 0; i < Mdomains; ++i) */
1642     ystart += maxheight;
1643   } /* for (j = 0; j < Ndomains; ++j) */
1644   PetscFunctionReturn(0);
1645 }
1646 
1647 #undef __FUNCT__
1648 #define __FUNCT__ "PCGASMGetSubdomains"
1649 /*@C
1650     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1651     for the additive Schwarz preconditioner.
1652 
1653     Not Collective
1654 
1655     Input Parameter:
1656 .   pc - the preconditioner context
1657 
1658     Output Parameters:
1659 +   n   - the number of subdomains for this processor (default value = 1)
1660 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be NULL)
1661 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be NULL)
1662 
1663 
    Notes:
    The arrays are allocated with PetscMalloc() and it is the user's responsibility to free them with PetscFree();
    the index sets themselves are owned by the preconditioner and must not be destroyed.
    The IS numbering is in the parallel, global numbering of the vector.
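
    A minimal sketch (error checking omitted; assumes pc is a set-up PCGASM preconditioner):
.vb
    PetscInt n;
    IS       *iis, *ois;
    PCGASMGetSubdomains(pc, &n, &iis, &ois);
    /* ... examine the index sets ... */
    PetscFree(iis);   /* free only the arrays; the index sets belong to the PC */
    PetscFree(ois);
.ve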
1667 
1668     Level: advanced
1669 
1670 .keywords: PC, GASM, get, subdomains, additive Schwarz
1671 
1672 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1673           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1674 @*/
1675 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1676 {
1677   PC_GASM        *osm;
1678   PetscErrorCode ierr;
1679   PetscBool      match;
1680   PetscInt       i;
1681 
1682   PetscFunctionBegin;
1683   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1684   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1685   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1686   osm = (PC_GASM*)pc->data;
1687   if (n) *n = osm->n;
1688   if (iis) {
1689     ierr = PetscMalloc1(osm->n, iis);CHKERRQ(ierr);
1690   }
1691   if (ois) {
1692     ierr = PetscMalloc1(osm->n, ois);CHKERRQ(ierr);
1693   }
1694   if (iis || ois) {
1695     for (i = 0; i < osm->n; ++i) {
1696       if (iis) (*iis)[i] = osm->iis[i];
1697       if (ois) (*ois)[i] = osm->ois[i];
1698     }
1699   }
1700   PetscFunctionReturn(0);
1701 }
1702 
1703 #undef __FUNCT__
1704 #define __FUNCT__ "PCGASMGetSubmatrices"
1705 /*@C
1706     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1707     only) for the additive Schwarz preconditioner.
1708 
1709     Not Collective
1710 
1711     Input Parameter:
1712 .   pc - the preconditioner context
1713 
1714     Output Parameters:
1715 +   n   - the number of matrices for this processor (default value = 1)
1716 -   mat - the matrices
1717 
1718     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
           used to define the subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF if the
           subdomains were defined using PCGASMSetTotalSubdomains().
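
    A minimal sketch (error checking omitted; valid only after KSPSetUp() or PCSetUp() has been called):
.vb
    PetscInt n;
    Mat      *submat;
    PCGASMGetSubmatrices(pc, &n, &submat);
    /* submat[0] .. submat[n-1] are the subdomain matrices owned by the PC; do not destroy them */
.ve
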
1721     Level: advanced
1722 
1723 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1724 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1726           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1727 @*/
1728 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1729 {
1730   PC_GASM        *osm;
1731   PetscErrorCode ierr;
1732   PetscBool      match;
1733 
1734   PetscFunctionBegin;
1735   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1736   PetscValidIntPointer(n,2);
1737   if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1739   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1740   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1741   osm = (PC_GASM*)pc->data;
1742   if (n) *n = osm->n;
1743   if (mat) *mat = osm->pmat;
1744   PetscFunctionReturn(0);
1745 }
1746 
1747 #undef __FUNCT__
1748 #define __FUNCT__ "PCGASMSetDMSubdomains"
1749 /*@
    PCGASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Logically Collective
1752 
    Input Parameters:
1754 +   pc  - the preconditioner
1755 -   flg - boolean indicating whether to use subdomains defined by the DM
1756 
1757     Options Database Key:
1758 .   -pc_gasm_dm_subdomains
1759 
1760     Level: intermediate
1761 
1762     Notes:
1763     PCGASMSetTotalSubdomains() and PCGASMSetOverlap() take precedence over PCGASMSetDMSubdomains(),
1764     so setting either of the first two effectively turns the latter off.
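
    A minimal sketch (error checking omitted; assumes pc has an attached DM that implements
    DMCreateDomainDecomposition()):
.vb
    PCSetType(pc, PCGASM);
    PCGASMSetDMSubdomains(pc, PETSC_TRUE);   /* must be called before PCSetUp() */
    PCSetUp(pc);
.ve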
1765 
1766 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1767 
1768 .seealso: PCGASMGetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1769           PCGASMCreateSubdomains2D()
1770 @*/
1771 PetscErrorCode  PCGASMSetDMSubdomains(PC pc,PetscBool flg)
1772 {
1773   PC_GASM        *osm = (PC_GASM*)pc->data;
1774   PetscErrorCode ierr;
1775   PetscBool      match;
1776 
1777   PetscFunctionBegin;
1778   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1779   PetscValidLogicalCollectiveBool(pc,flg,2);
  if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot be called after the PC has been set up.");
1781   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1782   if (match) {
1783     osm->dm_subdomains = flg;
1784   }
1785   PetscFunctionReturn(0);
1786 }
1787 
1788 #undef __FUNCT__
1789 #define __FUNCT__ "PCGASMGetDMSubdomains"
1790 /*@
    PCGASMGetDMSubdomains - Returns the flag indicating whether DMCreateDomainDecomposition() is used to define the subdomains, whenever possible.

    Not Collective
1793 
1794     Input Parameter:
1795 .   pc  - the preconditioner
1796 
1797     Output Parameter:
1798 .   flg - boolean indicating whether to use subdomains defined by the DM
1799 
1800     Level: intermediate
1801 
1802 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1803 
1804 .seealso: PCGASMSetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1805           PCGASMCreateSubdomains2D()
1806 @*/
1807 PetscErrorCode  PCGASMGetDMSubdomains(PC pc,PetscBool* flg)
1808 {
1809   PC_GASM        *osm = (PC_GASM*)pc->data;
1810   PetscErrorCode ierr;
1811   PetscBool      match;
1812 
1813   PetscFunctionBegin;
1814   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1815   PetscValidPointer(flg,2);
1816   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1817   if (match) {
1818     if (flg) *flg = osm->dm_subdomains;
1819   }
1820   PetscFunctionReturn(0);
1821 }
1822